mirror of
https://github.com/ansible/awx.git
synced 2026-01-11 10:00:01 -03:30
Merge pull request #4265 from AlanCoding/branch_feature_phase_2
Allow JT specification and prompting for project branch Reviewed-by: https://github.com/softwarefactory-project-zuul[bot]
This commit is contained in:
commit
43d816b6e4
@ -1285,8 +1285,8 @@ class OrganizationSerializer(BaseSerializer):
|
||||
class ProjectOptionsSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch',
|
||||
'scm_clean', 'scm_delete_on_update', 'credential', 'timeout',)
|
||||
fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch', 'scm_refspec',
|
||||
'scm_clean', 'scm_delete_on_update', 'credential', 'timeout', 'scm_revision')
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(ProjectOptionsSerializer, self).get_related(obj)
|
||||
@ -1311,6 +1311,8 @@ class ProjectOptionsSerializer(BaseSerializer):
|
||||
attrs.pop('local_path', None)
|
||||
if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths:
|
||||
errors['local_path'] = _('This path is already being used by another manual project.')
|
||||
if attrs.get('scm_refspec') and scm_type != 'git':
|
||||
errors['scm_refspec'] = _('SCM refspec can only be used with git projects.')
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
@ -1338,7 +1340,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = ('*', 'organization', 'scm_update_on_launch',
|
||||
'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
|
||||
'scm_update_cache_timeout', 'allow_override', 'custom_virtualenv',) + \
|
||||
('last_update_failed', 'last_updated') # Backwards compatibility
|
||||
|
||||
def get_related(self, obj):
|
||||
@ -1388,6 +1390,21 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
|
||||
elif self.instance:
|
||||
organization = self.instance.organization
|
||||
|
||||
if 'allow_override' in attrs and self.instance:
|
||||
# case where user is turning off this project setting
|
||||
if self.instance.allow_override and not attrs['allow_override']:
|
||||
used_by = set(
|
||||
JobTemplate.objects.filter(
|
||||
models.Q(project=self.instance),
|
||||
models.Q(ask_scm_branch_on_launch=True) | ~models.Q(scm_branch="")
|
||||
).values_list('pk', flat=True)
|
||||
)
|
||||
if used_by:
|
||||
raise serializers.ValidationError({
|
||||
'allow_override': _('One or more job templates depend on branch override behavior for this project (ids: {}).').format(
|
||||
' '.join([str(pk) for pk in used_by])
|
||||
)})
|
||||
|
||||
view = self.context.get('view', None)
|
||||
if not organization and not view.request.user.is_superuser:
|
||||
# Only allow super users to create orgless projects
|
||||
@ -2701,7 +2718,7 @@ class LabelsListMixin(object):
|
||||
class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
fields = ('*', 'job_type', 'inventory', 'project', 'playbook',
|
||||
fields = ('*', 'job_type', 'inventory', 'project', 'playbook', 'scm_branch',
|
||||
'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags',
|
||||
'force_handlers', 'skip_tags', 'start_at_task', 'timeout',
|
||||
'use_fact_cache',)
|
||||
@ -2748,16 +2765,28 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
|
||||
|
||||
def validate(self, attrs):
|
||||
if 'project' in self.fields and 'playbook' in self.fields:
|
||||
project = attrs.get('project', self.instance and self.instance.project or None)
|
||||
project = attrs.get('project', self.instance.project if self.instance else None)
|
||||
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
|
||||
scm_branch = attrs.get('scm_branch', self.instance.scm_branch if self.instance else None)
|
||||
ask_scm_branch_on_launch = attrs.get(
|
||||
'ask_scm_branch_on_launch', self.instance.ask_scm_branch_on_launch if self.instance else None)
|
||||
if not project:
|
||||
raise serializers.ValidationError({'project': _('This field is required.')})
|
||||
if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files:
|
||||
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
|
||||
if project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks:
|
||||
playbook_not_found = bool(
|
||||
(
|
||||
project and project.scm_type and (not project.allow_override) and
|
||||
playbook and force_text(playbook) not in project.playbook_files
|
||||
) or
|
||||
(project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual
|
||||
)
|
||||
if playbook_not_found:
|
||||
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
|
||||
if project and not playbook:
|
||||
raise serializers.ValidationError({'playbook': _('Must select playbook for project.')})
|
||||
if scm_branch and not project.allow_override:
|
||||
raise serializers.ValidationError({'scm_branch': _('Project does not allow overriding branch.')})
|
||||
if ask_scm_branch_on_launch and not project.allow_override:
|
||||
raise serializers.ValidationError({'ask_scm_branch_on_launch': _('Project does not allow overriding branch.')})
|
||||
|
||||
ret = super(JobOptionsSerializer, self).validate(attrs)
|
||||
return ret
|
||||
@ -2799,7 +2828,8 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
|
||||
class Meta:
|
||||
model = JobTemplate
|
||||
fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
|
||||
fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
|
||||
'ask_limit_on_launch', 'ask_tags_on_launch',
|
||||
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
|
||||
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
|
||||
'allow_simultaneous', 'custom_virtualenv', 'job_slice_count')
|
||||
@ -3365,6 +3395,7 @@ class WorkflowJobCancelSerializer(WorkflowJobSerializer):
|
||||
|
||||
|
||||
class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None,
|
||||
choices=NEW_JOB_TYPE_CHOICES)
|
||||
job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
@ -3377,7 +3408,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
fields = ('*', 'extra_data', 'inventory', # Saved launch-time config fields
|
||||
'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')
|
||||
'scm_branch', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(LaunchConfigurationBaseSerializer, self).get_related(obj)
|
||||
@ -3960,6 +3991,7 @@ class JobLaunchSerializer(BaseSerializer):
|
||||
required=False, write_only=True
|
||||
)
|
||||
credential_passwords = VerbatimField(required=False, write_only=True)
|
||||
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
diff_mode = serializers.BooleanField(required=False, write_only=True)
|
||||
job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True)
|
||||
@ -3970,13 +4002,15 @@ class JobLaunchSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = JobTemplate
|
||||
fields = ('can_start_without_user_input', 'passwords_needed_to_start',
|
||||
'extra_vars', 'inventory', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
|
||||
'credentials', 'credential_passwords', 'ask_variables_on_launch', 'ask_tags_on_launch',
|
||||
'extra_vars', 'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
|
||||
'credentials', 'credential_passwords',
|
||||
'ask_scm_branch_on_launch', 'ask_variables_on_launch', 'ask_tags_on_launch',
|
||||
'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch',
|
||||
'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
|
||||
'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start',
|
||||
'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity')
|
||||
read_only_fields = (
|
||||
'ask_scm_branch_on_launch',
|
||||
'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
|
||||
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
|
||||
'ask_inventory_on_launch', 'ask_credential_on_launch',)
|
||||
|
||||
@ -328,6 +328,16 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_COLLECTIONS_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
default=True,
|
||||
label=_('Enable Collection(s) Download'),
|
||||
help_text=_('Allows collections to be dynamically downloaded from a requirements.yml file for SCM projects.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'STDOUT_MAX_BYTES_DISPLAY',
|
||||
field_class=fields.IntegerField,
|
||||
|
||||
60
awx/main/migrations/0083_v360_job_branch_overrirde.py
Normal file
60
awx/main/migrations/0083_v360_job_branch_overrirde.py
Normal file
@ -0,0 +1,60 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.20 on 2019-06-14 15:08
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import awx.main.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0082_v360_webhook_http_method'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# Add fields for user-provided project refspec
|
||||
migrations.AddField(
|
||||
model_name='project',
|
||||
name='scm_refspec',
|
||||
field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='scm_refspec',
|
||||
field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'),
|
||||
),
|
||||
# Add fields for job specification of project branch
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='scm_branch',
|
||||
field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='ask_scm_branch_on_launch',
|
||||
field=awx.main.fields.AskForField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='scm_branch',
|
||||
field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='project',
|
||||
name='allow_override',
|
||||
field=models.BooleanField(default=False, help_text='Allow changing the SCM branch or revision in a job template that uses this project.'),
|
||||
),
|
||||
# Fix typo in help_text
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='scm_update_cache_timeout',
|
||||
field=models.PositiveIntegerField(blank=True, default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
|
||||
),
|
||||
# Start tracking the fetched revision on project update model
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='scm_revision',
|
||||
field=models.CharField(blank=True, default='', editable=False, help_text='The SCM Revision discovered by this update for the given project and branch.', max_length=1024, verbose_name='SCM Revision'),
|
||||
),
|
||||
]
|
||||
@ -96,6 +96,13 @@ class JobOptions(BaseModel):
|
||||
default='',
|
||||
blank=True,
|
||||
)
|
||||
scm_branch = models.CharField(
|
||||
max_length=1024,
|
||||
default='',
|
||||
blank=True,
|
||||
help_text=_('Branch to use in job run. Project default used if blank. '
|
||||
'Only allowed if project allow_override field is set to true.'),
|
||||
)
|
||||
forks = models.PositiveIntegerField(
|
||||
blank=True,
|
||||
default=0,
|
||||
@ -234,6 +241,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
||||
default=False,
|
||||
allows_field='credentials'
|
||||
)
|
||||
ask_scm_branch_on_launch = AskForField(
|
||||
blank=True,
|
||||
default=False,
|
||||
allows_field='scm_branch'
|
||||
)
|
||||
job_slice_count = models.PositiveIntegerField(
|
||||
blank=True,
|
||||
default=1,
|
||||
@ -387,7 +399,21 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
||||
# no-op case: Fields the same as template's value
|
||||
# counted as neither accepted or ignored
|
||||
continue
|
||||
elif field_name == 'scm_branch' and old_value == '' and self.project and new_value == self.project.scm_branch:
|
||||
# special case of "not provided" for branches
|
||||
# job template does not provide branch, runs with default branch
|
||||
continue
|
||||
elif getattr(self, ask_field_name):
|
||||
# Special case where prompts can be rejected based on project setting
|
||||
if field_name == 'scm_branch':
|
||||
if not self.project:
|
||||
rejected_data[field_name] = new_value
|
||||
errors_dict[field_name] = _('Project is missing.')
|
||||
continue
|
||||
if kwargs['scm_branch'] != self.project.scm_branch and not self.project.allow_override:
|
||||
rejected_data[field_name] = new_value
|
||||
errors_dict[field_name] = _('Project does not allow override of branch.')
|
||||
continue
|
||||
# accepted prompt
|
||||
prompted_data[field_name] = new_value
|
||||
else:
|
||||
@ -396,7 +422,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
||||
# Not considered an error for manual launch, to support old
|
||||
# behavior of putting them in ignored_fields and launching anyway
|
||||
if 'prompts' not in exclude_errors:
|
||||
errors_dict[field_name] = _('Field is not configured to prompt on launch.').format(field_name=field_name)
|
||||
errors_dict[field_name] = _('Field is not configured to prompt on launch.')
|
||||
|
||||
if ('prompts' not in exclude_errors and
|
||||
(not getattr(self, 'ask_credential_on_launch', False)) and
|
||||
|
||||
@ -106,6 +106,13 @@ class ProjectOptions(models.Model):
|
||||
verbose_name=_('SCM Branch'),
|
||||
help_text=_('Specific branch, tag or commit to checkout.'),
|
||||
)
|
||||
scm_refspec = models.CharField(
|
||||
max_length=1024,
|
||||
blank=True,
|
||||
default='',
|
||||
verbose_name=_('SCM refspec'),
|
||||
help_text=_('For git projects, an additional refspec to fetch.'),
|
||||
)
|
||||
scm_clean = models.BooleanField(
|
||||
default=False,
|
||||
help_text=_('Discard any local changes before syncing the project.'),
|
||||
@ -241,7 +248,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
|
||||
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
|
||||
FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
|
||||
FIELDS_TO_DISCARD_AT_COPY = ['local_path']
|
||||
FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type'])
|
||||
FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type', 'scm_refspec'])
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@ -261,9 +268,14 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
|
||||
scm_update_cache_timeout = models.PositiveIntegerField(
|
||||
default=0,
|
||||
blank=True,
|
||||
help_text=_('The number of seconds after the last project update ran that a new'
|
||||
help_text=_('The number of seconds after the last project update ran that a new '
|
||||
'project update will be launched as a job dependency.'),
|
||||
)
|
||||
allow_override = models.BooleanField(
|
||||
default=False,
|
||||
help_text=_('Allow changing the SCM branch or revision in a job template '
|
||||
'that uses this project.'),
|
||||
)
|
||||
|
||||
scm_revision = models.CharField(
|
||||
max_length=1024,
|
||||
@ -471,6 +483,14 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
|
||||
choices=PROJECT_UPDATE_JOB_TYPE_CHOICES,
|
||||
default='check',
|
||||
)
|
||||
scm_revision = models.CharField(
|
||||
max_length=1024,
|
||||
blank=True,
|
||||
default='',
|
||||
editable=False,
|
||||
verbose_name=_('SCM Revision'),
|
||||
help_text=_('The SCM Revision discovered by this update for the given project and branch.'),
|
||||
)
|
||||
|
||||
def _get_parent_field_name(self):
|
||||
return 'project'
|
||||
|
||||
@ -20,6 +20,8 @@ from distutils.dir_util import copy_tree
|
||||
from distutils.version import LooseVersion as Version
|
||||
import yaml
|
||||
import fcntl
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
try:
|
||||
import psutil
|
||||
except Exception:
|
||||
@ -41,6 +43,10 @@ from django.core.exceptions import ObjectDoesNotExist
|
||||
# Django-CRUM
|
||||
from crum import impersonate
|
||||
|
||||
# GitPython
|
||||
import git
|
||||
from gitdb.exc import BadName as BadGitName
|
||||
|
||||
# Runner
|
||||
import ansible_runner
|
||||
|
||||
@ -67,7 +73,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
|
||||
ignore_inventory_computed_fields,
|
||||
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
|
||||
get_awx_version)
|
||||
from awx.main.utils.common import _get_ansible_version, get_custom_venv_choices
|
||||
from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices
|
||||
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
|
||||
from awx.main.utils.reload import stop_local_services
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
@ -694,9 +700,11 @@ class BaseTask(object):
|
||||
model = None
|
||||
event_model = None
|
||||
abstract = True
|
||||
cleanup_paths = []
|
||||
proot_show_paths = []
|
||||
|
||||
def __init__(self):
|
||||
self.cleanup_paths = []
|
||||
|
||||
def update_model(self, pk, _attempt=0, **updates):
|
||||
"""Reload the model instance from the database and update the
|
||||
given fields.
|
||||
@ -769,9 +777,11 @@ class BaseTask(object):
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
if settings.AWX_CLEANUP_PATHS:
|
||||
self.cleanup_paths.append(path)
|
||||
# Ansible Runner requires that this directory exists.
|
||||
# Specifically, when using process isolation
|
||||
os.mkdir(os.path.join(path, 'project'))
|
||||
runner_project_folder = os.path.join(path, 'project')
|
||||
if not os.path.exists(runner_project_folder):
|
||||
# Ansible Runner requires that this directory exists.
|
||||
# Specifically, when using process isolation
|
||||
os.mkdir(runner_project_folder)
|
||||
return path
|
||||
|
||||
def build_private_data_files(self, instance, private_data_dir):
|
||||
@ -860,7 +870,10 @@ class BaseTask(object):
|
||||
'''
|
||||
process_isolation_params = dict()
|
||||
if self.should_use_proot(instance):
|
||||
show_paths = self.proot_show_paths + [private_data_dir, cwd] + \
|
||||
local_paths = [private_data_dir]
|
||||
if cwd != private_data_dir and Path(private_data_dir) not in Path(cwd).parents:
|
||||
local_paths.append(cwd)
|
||||
show_paths = self.proot_show_paths + local_paths + \
|
||||
settings.AWX_PROOT_SHOW_PATHS
|
||||
|
||||
# Help the user out by including the collections path inside the bubblewrap environment
|
||||
@ -1030,7 +1043,7 @@ class BaseTask(object):
|
||||
expect_passwords[k] = passwords.get(v, '') or ''
|
||||
return expect_passwords
|
||||
|
||||
def pre_run_hook(self, instance):
|
||||
def pre_run_hook(self, instance, private_data_dir):
|
||||
'''
|
||||
Hook for any steps to run before the job/task starts
|
||||
'''
|
||||
@ -1157,7 +1170,8 @@ class BaseTask(object):
|
||||
try:
|
||||
isolated = self.instance.is_isolated()
|
||||
self.instance.send_notification_templates("running")
|
||||
self.pre_run_hook(self.instance)
|
||||
private_data_dir = self.build_private_data_dir(self.instance)
|
||||
self.pre_run_hook(self.instance, private_data_dir)
|
||||
if self.instance.cancel_flag:
|
||||
self.instance = self.update_model(self.instance.pk, status='canceled')
|
||||
if self.instance.status != 'running':
|
||||
@ -1173,7 +1187,6 @@ class BaseTask(object):
|
||||
# store a record of the venv used at runtime
|
||||
if hasattr(self.instance, 'custom_virtualenv'):
|
||||
self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH))
|
||||
private_data_dir = self.build_private_data_dir(self.instance)
|
||||
|
||||
# Fetch "cached" fact data from prior runs and put on the disk
|
||||
# where ansible expects to find it
|
||||
@ -1256,9 +1269,6 @@ class BaseTask(object):
|
||||
module_args = ansible_runner.utils.args2cmdline(
|
||||
params.get('module_args'),
|
||||
)
|
||||
else:
|
||||
# otherwise, it's a playbook, so copy the project dir
|
||||
copy_tree(cwd, os.path.join(private_data_dir, 'project'))
|
||||
shutil.move(
|
||||
params.pop('inventory'),
|
||||
os.path.join(private_data_dir, 'inventory')
|
||||
@ -1464,6 +1474,15 @@ class RunJob(BaseTask):
|
||||
if authorize:
|
||||
env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')
|
||||
|
||||
for env_key, folder in (
|
||||
('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'),
|
||||
('ANSIBLE_ROLES_PATH', 'requirements_roles')):
|
||||
paths = []
|
||||
if env_key in env:
|
||||
paths.append(env[env_key])
|
||||
paths.append(os.path.join(private_data_dir, folder))
|
||||
env[env_key] = os.pathsep.join(paths)
|
||||
|
||||
return env
|
||||
|
||||
def build_args(self, job, private_data_dir, passwords):
|
||||
@ -1532,15 +1551,10 @@ class RunJob(BaseTask):
|
||||
return args
|
||||
|
||||
def build_cwd(self, job, private_data_dir):
|
||||
cwd = job.project.get_project_path()
|
||||
if not cwd:
|
||||
root = settings.PROJECTS_ROOT
|
||||
raise RuntimeError('project local_path %s cannot be found in %s' %
|
||||
(job.project.local_path, root))
|
||||
return cwd
|
||||
return os.path.join(private_data_dir, 'project')
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
|
||||
return os.path.join(job.playbook)
|
||||
return job.playbook
|
||||
|
||||
def build_extra_vars_file(self, job, private_data_dir):
|
||||
# Define special extra_vars for AWX, combine with job.extra_vars.
|
||||
@ -1587,39 +1601,86 @@ class RunJob(BaseTask):
|
||||
'''
|
||||
return getattr(settings, 'AWX_PROOT_ENABLED', False)
|
||||
|
||||
def pre_run_hook(self, job):
|
||||
def pre_run_hook(self, job, private_data_dir):
|
||||
if job.inventory is None:
|
||||
error = _('Job could not start because it does not have a valid inventory.')
|
||||
self.update_model(job.pk, status='failed', job_explanation=error)
|
||||
raise RuntimeError(error)
|
||||
if job.project and job.project.scm_type:
|
||||
elif job.project is None:
|
||||
error = _('Job could not start because it does not have a valid project.')
|
||||
self.update_model(job.pk, status='failed', job_explanation=error)
|
||||
raise RuntimeError(error)
|
||||
elif job.project.status in ('error', 'failed'):
|
||||
msg = _(
|
||||
'The project revision for this job template is unknown due to a failed update.'
|
||||
)
|
||||
job = self.update_model(job.pk, status='failed', job_explanation=msg)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
project_path = job.project.get_project_path(check_if_exists=False)
|
||||
job_revision = job.project.scm_revision
|
||||
needs_sync = True
|
||||
if not job.project.scm_type:
|
||||
# manual projects are not synced, user has responsibility for that
|
||||
needs_sync = False
|
||||
elif not os.path.exists(project_path):
|
||||
logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
|
||||
elif not job.project.scm_revision:
|
||||
logger.debug('Revision not known for {}, will sync with remote'.format(job.project))
|
||||
elif job.project.scm_type == 'git':
|
||||
git_repo = git.Repo(project_path)
|
||||
try:
|
||||
desired_revision = job.project.scm_revision
|
||||
if job.scm_branch and job.scm_branch != job.project.scm_branch:
|
||||
desired_revision = job.scm_branch # could be commit or not, but will try as commit
|
||||
current_revision = git_repo.head.commit.hexsha
|
||||
if desired_revision == current_revision:
|
||||
job_revision = desired_revision
|
||||
logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
|
||||
needs_sync = False
|
||||
except (ValueError, BadGitName):
|
||||
logger.debug('Needed commit for {} not in local source tree, will sync with remote'.format(job.log_format))
|
||||
# Galaxy requirements are not supported for manual projects
|
||||
if not needs_sync and job.project.scm_type:
|
||||
# see if we need a sync because of presence of roles
|
||||
galaxy_req_path = os.path.join(project_path, 'roles', 'requirements.yml')
|
||||
if os.path.exists(galaxy_req_path):
|
||||
logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format))
|
||||
needs_sync = True
|
||||
|
||||
galaxy_collections_req_path = os.path.join(project_path, 'collections', 'requirements.yml')
|
||||
if os.path.exists(galaxy_collections_req_path):
|
||||
logger.debug('Running project sync for {} because of galaxy collections requirements.'.format(job.log_format))
|
||||
needs_sync = True
|
||||
|
||||
if needs_sync:
|
||||
pu_ig = job.instance_group
|
||||
pu_en = job.execution_node
|
||||
if job.is_isolated() is True:
|
||||
pu_ig = pu_ig.controller
|
||||
pu_en = settings.CLUSTER_HOST_ID
|
||||
if job.project.status in ('error', 'failed'):
|
||||
msg = _(
|
||||
'The project revision for this job template is unknown due to a failed update.'
|
||||
)
|
||||
job = self.update_model(job.pk, status='failed', job_explanation=msg)
|
||||
raise RuntimeError(msg)
|
||||
local_project_sync = job.project.create_project_update(
|
||||
_eager_fields=dict(
|
||||
launch_type="sync",
|
||||
job_type='run',
|
||||
status='running',
|
||||
instance_group = pu_ig,
|
||||
execution_node=pu_en,
|
||||
celery_task_id=job.celery_task_id))
|
||||
sync_metafields = dict(
|
||||
launch_type="sync",
|
||||
job_type='run',
|
||||
status='running',
|
||||
instance_group = pu_ig,
|
||||
execution_node=pu_en,
|
||||
celery_task_id=job.celery_task_id
|
||||
)
|
||||
if job.scm_branch and job.scm_branch != job.project.scm_branch:
|
||||
sync_metafields['scm_branch'] = job.scm_branch
|
||||
local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields)
|
||||
# save the associated job before calling run() so that a
|
||||
# cancel() call on the job can cancel the project update
|
||||
job = self.update_model(job.pk, project_update=local_project_sync)
|
||||
|
||||
project_update_task = local_project_sync._get_task_class()
|
||||
try:
|
||||
project_update_task().run(local_project_sync.id)
|
||||
job = self.update_model(job.pk, scm_revision=job.project.scm_revision)
|
||||
# the job private_data_dir is passed so sync can download roles and collections there
|
||||
sync_task = project_update_task(job_private_data_dir=private_data_dir)
|
||||
sync_task.run(local_project_sync.id)
|
||||
local_project_sync.refresh_from_db()
|
||||
job = self.update_model(job.pk, scm_revision=local_project_sync.scm_revision)
|
||||
except Exception:
|
||||
local_project_sync.refresh_from_db()
|
||||
if local_project_sync.status != 'canceled':
|
||||
@ -1627,6 +1688,38 @@ class RunJob(BaseTask):
|
||||
job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
|
||||
('project_update', local_project_sync.name, local_project_sync.id)))
|
||||
raise
|
||||
job.refresh_from_db()
|
||||
if job.cancel_flag:
|
||||
return
|
||||
else:
|
||||
# Case where a local sync is not needed, meaning that local tree is
|
||||
# up-to-date with project, job is running project current version
|
||||
if job_revision:
|
||||
job = self.update_model(job.pk, scm_revision=job_revision)
|
||||
|
||||
# copy the project directory
|
||||
runner_project_folder = os.path.join(private_data_dir, 'project')
|
||||
if job.project.scm_type == 'git':
|
||||
git_repo = git.Repo(project_path)
|
||||
if not os.path.exists(runner_project_folder):
|
||||
os.mkdir(runner_project_folder)
|
||||
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
|
||||
# always clone based on specific job revision
|
||||
if not job.scm_revision:
|
||||
raise RuntimeError('Unexpectedly could not determine a revision to run from project.')
|
||||
source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision)
|
||||
# git clone must take file:// syntax for source repo or else options like depth will be ignored
|
||||
source_as_uri = Path(project_path).as_uri()
|
||||
git.Repo.clone_from(
|
||||
source_as_uri, runner_project_folder, branch=source_branch,
|
||||
depth=1, single_branch=True, # shallow, do not copy full history
|
||||
recursive=True # include submodules
|
||||
)
|
||||
# force option is necessary because remote refs are not counted, although no information is lost
|
||||
git_repo.delete_head(tmp_branch_name, force=True)
|
||||
else:
|
||||
copy_tree(project_path, runner_project_folder)
|
||||
|
||||
if job.inventory.kind == 'smart':
|
||||
# cache smart inventory memberships so that the host_filter query is not
|
||||
# ran inside of the event saving code
|
||||
@ -1663,7 +1756,24 @@ class RunProjectUpdate(BaseTask):
|
||||
|
||||
@property
|
||||
def proot_show_paths(self):
|
||||
return [settings.PROJECTS_ROOT]
|
||||
show_paths = [settings.PROJECTS_ROOT]
|
||||
if self.job_private_data_dir:
|
||||
show_paths.append(self.job_private_data_dir)
|
||||
return show_paths
|
||||
|
||||
def __init__(self, *args, job_private_data_dir=None, **kwargs):
|
||||
super(RunProjectUpdate, self).__init__(*args, **kwargs)
|
||||
self.playbook_new_revision = None
|
||||
self.original_branch = None
|
||||
self.job_private_data_dir = job_private_data_dir
|
||||
|
||||
def event_handler(self, event_data):
|
||||
super(RunProjectUpdate, self).event_handler(event_data)
|
||||
returned_data = event_data.get('event_data', {})
|
||||
if returned_data.get('task_action', '') == 'set_fact':
|
||||
returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
|
||||
if 'scm_version' in returned_facts:
|
||||
self.playbook_new_revision = returned_facts['scm_version']
|
||||
|
||||
def build_private_data(self, project_update, private_data_dir):
|
||||
'''
|
||||
@ -1678,14 +1788,17 @@ class RunProjectUpdate(BaseTask):
|
||||
}
|
||||
}
|
||||
'''
|
||||
handle, self.revision_path = tempfile.mkstemp(dir=settings.PROJECTS_ROOT)
|
||||
if settings.AWX_CLEANUP_PATHS:
|
||||
self.cleanup_paths.append(self.revision_path)
|
||||
private_data = {'credentials': {}}
|
||||
if project_update.credential:
|
||||
credential = project_update.credential
|
||||
if credential.has_input('ssh_key_data'):
|
||||
private_data['credentials'][credential] = credential.get_input('ssh_key_data', default='')
|
||||
|
||||
# Create dir where collections will live for the job run
|
||||
if project_update.job_type != 'check' and getattr(self, 'job_private_data_dir'):
|
||||
for folder_name in ('requirements_collections', 'requirements_roles'):
|
||||
folder_path = os.path.join(self.job_private_data_dir, folder_name)
|
||||
os.mkdir(folder_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
|
||||
return private_data
|
||||
|
||||
def build_passwords(self, project_update, runtime_passwords):
|
||||
@ -1781,10 +1894,21 @@ class RunProjectUpdate(BaseTask):
|
||||
scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update)
|
||||
extra_vars.update(extra_vars_new)
|
||||
|
||||
if project_update.project.scm_revision and project_update.job_type == 'run':
|
||||
scm_branch = project_update.scm_branch
|
||||
branch_override = bool(project_update.scm_branch != project_update.project.scm_branch)
|
||||
if project_update.job_type == 'run' and scm_branch and (not branch_override):
|
||||
scm_branch = project_update.project.scm_revision
|
||||
elif not scm_branch:
|
||||
scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
|
||||
if project_update.job_type == 'check':
|
||||
roles_enabled = False
|
||||
collections_enabled = False
|
||||
else:
|
||||
scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
|
||||
roles_enabled = getattr(settings, 'AWX_ROLES_ENABLED', True)
|
||||
collections_enabled = getattr(settings, 'AWX_COLLECTIONS_ENABLED', True)
|
||||
# collections were introduced in Ansible version 2.8
|
||||
if Version(get_ansible_version()) <= Version('2.8'):
|
||||
collections_enabled = False
|
||||
extra_vars.update({
|
||||
'project_path': project_update.get_project_path(check_if_exists=False),
|
||||
'insights_url': settings.INSIGHTS_URL_BASE,
|
||||
@ -1796,17 +1920,24 @@ class RunProjectUpdate(BaseTask):
|
||||
'scm_clean': project_update.scm_clean,
|
||||
'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,
|
||||
'scm_full_checkout': True if project_update.job_type == 'run' else False,
|
||||
'scm_revision_output': self.revision_path,
|
||||
'scm_revision': project_update.project.scm_revision,
|
||||
'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True)
|
||||
'roles_enabled': roles_enabled,
|
||||
'collections_enabled': collections_enabled,
|
||||
})
|
||||
if project_update.job_type != 'check' and self.job_private_data_dir:
|
||||
extra_vars['collections_destination'] = os.path.join(self.job_private_data_dir, 'requirements_collections')
|
||||
extra_vars['roles_destination'] = os.path.join(self.job_private_data_dir, 'requirements_roles')
|
||||
# apply custom refspec from user for PR refs and the like
|
||||
if project_update.scm_refspec:
|
||||
extra_vars['scm_refspec'] = project_update.scm_refspec
|
||||
elif project_update.project.allow_override:
|
||||
# If branch is override-able, do extra fetch for all branches
|
||||
extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
|
||||
self._write_extra_vars_file(private_data_dir, extra_vars)
|
||||
|
||||
def build_cwd(self, project_update, private_data_dir):
|
||||
return self.get_path_to('..', 'playbooks')
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir):
|
||||
self.build_cwd(project_update, private_data_dir)
|
||||
return os.path.join('project_update.yml')
|
||||
|
||||
def get_password_prompts(self, passwords={}):
|
||||
@ -1920,25 +2051,42 @@ class RunProjectUpdate(BaseTask):
|
||||
'{} spent {} waiting to acquire lock for local source tree '
|
||||
'for path {}.'.format(instance.log_format, waiting_time, lock_path))
|
||||
|
||||
def pre_run_hook(self, instance):
|
||||
def pre_run_hook(self, instance, private_data_dir):
|
||||
# re-create root project folder if a natural disaster has destroyed it
|
||||
if not os.path.exists(settings.PROJECTS_ROOT):
|
||||
os.mkdir(settings.PROJECTS_ROOT)
|
||||
self.acquire_lock(instance)
|
||||
self.original_branch = None
|
||||
if (instance.scm_type == 'git' and instance.job_type == 'run' and instance.project and
|
||||
instance.scm_branch != instance.project.scm_branch):
|
||||
project_path = instance.project.get_project_path(check_if_exists=False)
|
||||
if os.path.exists(project_path):
|
||||
git_repo = git.Repo(project_path)
|
||||
self.original_branch = git_repo.active_branch
|
||||
|
||||
def post_run_hook(self, instance, status):
|
||||
if self.original_branch:
|
||||
# for git project syncs, non-default branches can be problems
|
||||
# restore to branch the repo was on before this run
|
||||
try:
|
||||
self.original_branch.checkout()
|
||||
except Exception:
|
||||
# this could have failed due to dirty tree, but difficult to predict all cases
|
||||
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
|
||||
self.release_lock(instance)
|
||||
p = instance.project
|
||||
if self.playbook_new_revision:
|
||||
instance.scm_revision = self.playbook_new_revision
|
||||
instance.save(update_fields=['scm_revision'])
|
||||
if instance.job_type == 'check' and status not in ('failed', 'canceled',):
|
||||
fd = open(self.revision_path, 'r')
|
||||
lines = fd.readlines()
|
||||
if lines:
|
||||
p.scm_revision = lines[0].strip()
|
||||
if self.playbook_new_revision:
|
||||
p.scm_revision = self.playbook_new_revision
|
||||
else:
|
||||
logger.info("{} Could not find scm revision in check".format(instance.log_format))
|
||||
if status == 'successful':
|
||||
logger.error("{} Could not find scm revision in check".format(instance.log_format))
|
||||
p.playbook_files = p.playbooks
|
||||
p.inventory_files = p.inventories
|
||||
p.save()
|
||||
p.save(update_fields=['scm_revision', 'playbook_files', 'inventory_files'])
|
||||
|
||||
# Update any inventories that depend on this project
|
||||
dependent_inventory_sources = p.scm_inventory_sources.filter(update_on_project_update=True)
|
||||
@ -2159,11 +2307,12 @@ class RunInventoryUpdate(BaseTask):
|
||||
# All credentials not used by inventory source injector
|
||||
return inventory_update.get_extra_credentials()
|
||||
|
||||
def pre_run_hook(self, inventory_update):
|
||||
def pre_run_hook(self, inventory_update, private_data_dir):
|
||||
source_project = None
|
||||
if inventory_update.inventory_source:
|
||||
source_project = inventory_update.inventory_source.source_project
|
||||
if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and source_project):
|
||||
# In project sync, pulling galaxy roles is not needed
|
||||
local_project_sync = source_project.create_project_update(
|
||||
_eager_fields=dict(
|
||||
launch_type="sync",
|
||||
|
||||
@ -516,6 +516,25 @@ def test_job_launch_JT_with_credentials(machine_credential, credential, net_cred
|
||||
assert machine_credential in creds
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_branch_rejected_and_accepted(deploy_jobtemplate):
|
||||
deploy_jobtemplate.ask_scm_branch_on_launch = True
|
||||
deploy_jobtemplate.save()
|
||||
prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(
|
||||
scm_branch='foobar'
|
||||
)
|
||||
assert 'scm_branch' in ignored_fields
|
||||
assert 'does not allow override of branch' in errors['scm_branch']
|
||||
|
||||
deploy_jobtemplate.project.allow_override = True
|
||||
deploy_jobtemplate.project.save()
|
||||
prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(
|
||||
scm_branch='foobar'
|
||||
)
|
||||
assert not ignored_fields
|
||||
assert prompted_fields['scm_branch'] == 'foobar'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user):
|
||||
|
||||
@ -505,3 +505,37 @@ def test_callback_disallowed_null_inventory(project):
|
||||
with pytest.raises(ValidationError) as exc:
|
||||
serializer.validate({'host_config_key': 'asdfbasecfeee'})
|
||||
assert 'Cannot enable provisioning callback without an inventory set' in str(exc)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_branch_error(project, inventory, post, admin_user):
|
||||
r = post(
|
||||
url=reverse('api:job_template_list'),
|
||||
data={
|
||||
"name": "fooo",
|
||||
"inventory": inventory.pk,
|
||||
"project": project.pk,
|
||||
"playbook": "helloworld.yml",
|
||||
"scm_branch": "foobar"
|
||||
},
|
||||
user=admin_user,
|
||||
expect=400
|
||||
)
|
||||
assert 'Project does not allow overriding branch' in str(r.data['scm_branch'])
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_branch_prompt_error(project, inventory, post, admin_user):
|
||||
r = post(
|
||||
url=reverse('api:job_template_list'),
|
||||
data={
|
||||
"name": "fooo",
|
||||
"inventory": inventory.pk,
|
||||
"project": project.pk,
|
||||
"playbook": "helloworld.yml",
|
||||
"ask_scm_branch_on_launch": True
|
||||
},
|
||||
user=admin_user,
|
||||
expect=400
|
||||
)
|
||||
assert 'Project does not allow overriding branch' in str(r.data['ask_scm_branch_on_launch'])
|
||||
|
||||
@ -5,17 +5,18 @@ from django.conf import settings
|
||||
import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models import Project, JobTemplate
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestInsightsCredential:
|
||||
def test_insights_credential(self, patch, insights_project, admin_user, insights_credential):
|
||||
patch(insights_project.get_absolute_url(),
|
||||
patch(insights_project.get_absolute_url(),
|
||||
{'credential': insights_credential.id}, admin_user,
|
||||
expect=200)
|
||||
|
||||
def test_non_insights_credential(self, patch, insights_project, admin_user, scm_credential):
|
||||
patch(insights_project.get_absolute_url(),
|
||||
patch(insights_project.get_absolute_url(),
|
||||
{'credential': scm_credential.id}, admin_user,
|
||||
expect=400)
|
||||
|
||||
@ -44,3 +45,52 @@ def test_project_unset_custom_virtualenv(get, patch, project, admin, value):
|
||||
url = reverse('api:project_detail', kwargs={'pk': project.id})
|
||||
resp = patch(url, {'custom_virtualenv': value}, user=admin, expect=200)
|
||||
assert resp.data['custom_virtualenv'] is None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_changing_overwrite_behavior_if_used(post, patch, organization, admin_user):
|
||||
r1 = post(
|
||||
url=reverse('api:project_list'),
|
||||
data={
|
||||
'name': 'fooo',
|
||||
'organization': organization.id,
|
||||
'allow_override': True
|
||||
},
|
||||
user=admin_user,
|
||||
expect=201
|
||||
)
|
||||
JobTemplate.objects.create(
|
||||
name='provides branch', project_id=r1.data['id'],
|
||||
playbook='helloworld.yml',
|
||||
scm_branch='foobar'
|
||||
)
|
||||
r2 = patch(
|
||||
url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}),
|
||||
data={'allow_override': False},
|
||||
user=admin_user,
|
||||
expect=400
|
||||
)
|
||||
assert 'job templates depend on branch override behavior for this project' in str(r2.data['allow_override'])
|
||||
assert 'ids: 2' in str(r2.data['allow_override'])
|
||||
assert Project.objects.get(pk=r1.data['id']).allow_override is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization, admin_user):
|
||||
r1 = post(
|
||||
url=reverse('api:project_list'),
|
||||
data={
|
||||
'name': 'fooo',
|
||||
'organization': organization.id,
|
||||
'allow_override': True
|
||||
},
|
||||
user=admin_user,
|
||||
expect=201
|
||||
)
|
||||
patch(
|
||||
url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}),
|
||||
data={'allow_override': False},
|
||||
user=admin_user,
|
||||
expect=200
|
||||
)
|
||||
assert Project.objects.get(pk=r1.data['id']).allow_override is False
|
||||
|
||||
@ -256,7 +256,7 @@ class TestExtraVarSanitation(TestJobExecution):
|
||||
|
||||
def test_vars_unsafe_by_default(self, job, private_data_dir):
|
||||
job.created_by = User(pk=123, username='angry-spud')
|
||||
job.inventory = Inventory(pk=123, name='example-inv')
|
||||
job.inventory = Inventory(pk=123, name='example-inv')
|
||||
|
||||
task = tasks.RunJob()
|
||||
task.build_extra_vars_file(job, private_data_dir)
|
||||
@ -361,15 +361,16 @@ class TestExtraVarSanitation(TestJobExecution):
|
||||
class TestGenericRun():
|
||||
|
||||
def test_generic_failure(self, patch_Job):
|
||||
job = Job(status='running', inventory=Inventory())
|
||||
job = Job(status='running', inventory=Inventory(), project=Project())
|
||||
job.websocket_emit_status = mock.Mock()
|
||||
|
||||
task = tasks.RunJob()
|
||||
task.update_model = mock.Mock(return_value=job)
|
||||
task.build_private_data_files = mock.Mock(side_effect=OSError())
|
||||
|
||||
with pytest.raises(Exception):
|
||||
task.run(1)
|
||||
with mock.patch('awx.main.tasks.copy_tree'):
|
||||
with pytest.raises(Exception):
|
||||
task.run(1)
|
||||
|
||||
update_model_call = task.update_model.call_args[1]
|
||||
assert 'OSError' in update_model_call['result_traceback']
|
||||
@ -386,8 +387,9 @@ class TestGenericRun():
|
||||
task.update_model = mock.Mock(wraps=update_model_wrapper)
|
||||
task.build_private_data_files = mock.Mock()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
task.run(1)
|
||||
with mock.patch('awx.main.tasks.copy_tree'):
|
||||
with pytest.raises(Exception):
|
||||
task.run(1)
|
||||
|
||||
for c in [
|
||||
mock.call(1, status='running', start_args=''),
|
||||
@ -524,7 +526,10 @@ class TestGenericRun():
|
||||
with mock.patch('awx.main.tasks.settings.AWX_ANSIBLE_COLLECTIONS_PATHS', ['/AWX_COLLECTION_PATH']):
|
||||
with mock.patch('awx.main.tasks.settings.AWX_TASK_ENV', {'ANSIBLE_COLLECTIONS_PATHS': '/MY_COLLECTION1:/MY_COLLECTION2'}):
|
||||
env = task.build_env(job, private_data_dir)
|
||||
assert env['ANSIBLE_COLLECTIONS_PATHS'] == '/MY_COLLECTION1:/MY_COLLECTION2:/AWX_COLLECTION_PATH'
|
||||
used_paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(':')
|
||||
assert used_paths[-1].endswith('/requirements_collections')
|
||||
used_paths.pop()
|
||||
assert used_paths == ['/MY_COLLECTION1', '/MY_COLLECTION2', '/AWX_COLLECTION_PATH']
|
||||
|
||||
def test_valid_custom_virtualenv(self, patch_Job, private_data_dir):
|
||||
job = Job(project=Project(), inventory=Inventory())
|
||||
@ -1720,8 +1725,6 @@ class TestProjectUpdateCredentials(TestJobExecution):
|
||||
call_args, _ = task._write_extra_vars_file.call_args_list[0]
|
||||
_, extra_vars = call_args
|
||||
|
||||
assert extra_vars["scm_revision_output"] == 'foobar'
|
||||
|
||||
def test_username_and_password_auth(self, project_update, scm_type):
|
||||
task = tasks.RunProjectUpdate()
|
||||
ssh = CredentialType.defaults['ssh']()
|
||||
|
||||
@ -11,9 +11,9 @@
|
||||
# scm_username: username (only for svn/insights)
|
||||
# scm_password: password (only for svn/insights)
|
||||
# scm_accept_hostkey: true/false (only for git, set automatically)
|
||||
# scm_revision: current revision in tower
|
||||
# scm_revision_output: where to store gathered revision (temporary file)
|
||||
# scm_refspec: a refspec to fetch in addition to obtaining version
|
||||
# roles_enabled: Allow us to pull roles from a requirements.yml file
|
||||
# roles_destination: Path to save roles from galaxy to
|
||||
# awx_version: Current running version of the awx or tower as a string
|
||||
# awx_license_type: "open" for AWX; else presume Tower
|
||||
|
||||
@ -29,27 +29,12 @@
|
||||
delegate_to: localhost
|
||||
|
||||
- block:
|
||||
- name: check repo using git
|
||||
git:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url}}"
|
||||
version: "{{scm_branch|quote}}"
|
||||
force: "{{scm_clean}}"
|
||||
update: false
|
||||
clone: false
|
||||
register: repo_check
|
||||
when: scm_full_checkout|default('')
|
||||
ignore_errors: true
|
||||
|
||||
- name: break if already checked out
|
||||
meta: end_play
|
||||
when: scm_full_checkout|default('') and repo_check is succeeded and repo_check.before == scm_branch
|
||||
|
||||
- name: update project using git
|
||||
git:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url}}"
|
||||
version: "{{scm_branch|quote}}"
|
||||
refspec: "{{scm_refspec|default(omit)}}"
|
||||
force: "{{scm_clean}}"
|
||||
accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
|
||||
register: git_result
|
||||
@ -131,13 +116,6 @@
|
||||
debug: msg="Repository Version {{ scm_version }}"
|
||||
when: scm_version is defined
|
||||
|
||||
- name: Write Repository Version
|
||||
copy:
|
||||
dest: "{{ scm_revision_output }}"
|
||||
content: "{{ scm_version }}"
|
||||
when: scm_version is defined and scm_revision_output is defined
|
||||
delegate_to: localhost
|
||||
|
||||
- hosts: all
|
||||
gather_facts: false
|
||||
tasks:
|
||||
@ -148,18 +126,28 @@
|
||||
register: doesRequirementsExist
|
||||
|
||||
- name: fetch galaxy roles from requirements.yml
|
||||
command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/
|
||||
command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}/roles"
|
||||
register: galaxy_result
|
||||
when: doesRequirementsExist.stat.exists and (scm_version is undefined or (git_result is not skipped and git_result['before'] == git_result['after']))
|
||||
when: doesRequirementsExist.stat.exists
|
||||
changed_when: "'was installed successfully' in galaxy_result.stdout"
|
||||
|
||||
- name: fetch galaxy roles from requirements.yml (forced update)
|
||||
command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force
|
||||
args:
|
||||
chdir: "{{project_path|quote}}/roles"
|
||||
when: doesRequirementsExist.stat.exists and galaxy_result is skipped
|
||||
|
||||
when: roles_enabled|bool
|
||||
delegate_to: localhost
|
||||
|
||||
- block:
|
||||
- name: detect collections/requirements.yml
|
||||
stat: path={{project_path|quote}}/collections/requirements.yml
|
||||
register: doesCollectionRequirementsExist
|
||||
|
||||
- name: fetch galaxy collections from collections/requirements.yml
|
||||
command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}/collections"
|
||||
register: galaxy_collection_result
|
||||
when: doesCollectionRequirementsExist.stat.exists
|
||||
changed_when: "'Installing ' in galaxy_collection_result.stdout"
|
||||
|
||||
when: collections_enabled|bool
|
||||
delegate_to: localhost
|
||||
|
||||
@ -604,6 +604,11 @@ ALLOW_JINJA_IN_EXTRA_VARS = 'template'
|
||||
# Note: This setting may be overridden by database settings.
|
||||
AWX_ROLES_ENABLED = True
|
||||
|
||||
# Enable dynamically pulling collections from a requirement.yml file
|
||||
# when updating SCM projects
|
||||
# Note: This setting may be overridden by database settings.
|
||||
AWX_COLLECTIONS_ENABLED = True
|
||||
|
||||
# Enable bubblewrap support for running jobs (playbook runs only).
|
||||
# Note: This setting may be overridden by database settings.
|
||||
AWX_PROOT_ENABLED = True
|
||||
|
||||
@ -343,6 +343,28 @@ function getProjectUpdateDetails (updateId) {
|
||||
return { link, tooltip };
|
||||
}
|
||||
|
||||
function getSCMBranchDetails (scmBranch) {
|
||||
const label = strings.get('labels.SCM_BRANCH');
|
||||
const value = scmBranch || resource.model.get('scm_branch');
|
||||
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { label, value };
|
||||
}
|
||||
|
||||
function getSCMRefspecDetails (scmRefspec) {
|
||||
const label = strings.get('labels.SCM_REFSPEC');
|
||||
const value = scmRefspec || resource.model.get('scm_refspec');
|
||||
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { label, value };
|
||||
}
|
||||
|
||||
function getInventoryScmDetails (updateId, updateStatus) {
|
||||
const projectId = resource.model.get('summary_fields.source_project.id');
|
||||
const projectName = resource.model.get('summary_fields.source_project.name');
|
||||
@ -800,6 +822,8 @@ function JobDetailsController (
|
||||
vm.project = getProjectDetails();
|
||||
vm.projectUpdate = getProjectUpdateDetails();
|
||||
vm.projectStatus = getProjectStatusDetails();
|
||||
vm.scmBranch = getSCMBranchDetails();
|
||||
vm.scmRefspec = getSCMRefspecDetails();
|
||||
vm.scmRevision = getSCMRevisionDetails();
|
||||
vm.inventoryScm = getInventoryScmDetails();
|
||||
vm.playbook = getPlaybookDetails();
|
||||
@ -840,6 +864,8 @@ function JobDetailsController (
|
||||
started,
|
||||
finished,
|
||||
scm,
|
||||
scmBranch,
|
||||
scmRefspec,
|
||||
inventoryScm,
|
||||
scmRevision,
|
||||
instanceGroup,
|
||||
@ -851,6 +877,8 @@ function JobDetailsController (
|
||||
vm.finished = getFinishDetails(finished);
|
||||
vm.projectUpdate = getProjectUpdateDetails(scm.id);
|
||||
vm.projectStatus = getProjectStatusDetails(scm.status);
|
||||
vm.scmBranch = getSCMBranchDetails(scmBranch);
|
||||
vm.scmRefspec = getSCMRefspecDetails(scmRefspec);
|
||||
vm.environment = getEnvironmentDetails(environment);
|
||||
vm.artifacts = getArtifactsDetails(artifacts);
|
||||
vm.executionNode = getExecutionNodeDetails(executionNode);
|
||||
|
||||
@ -218,6 +218,18 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- BRANCH DETAIL -->
|
||||
<div class="JobResults-resultRow" ng-if="vm.scmBranch">
|
||||
<label class="JobResults-resultRowLabel">{{ vm.scmBranch.label }}</label>
|
||||
<div class="JobResults-resultRowText">{{ vm.scmBranch.value }}</div>
|
||||
</div>
|
||||
|
||||
<!-- REFSPEC DETAIL -->
|
||||
<div class="JobResults-resultRow" ng-if="vm.scmRefspec">
|
||||
<label class="JobResults-resultRowLabel">{{ vm.scmRefspec.label }}</label>
|
||||
<div class="JobResults-resultRowText">{{ vm.scmRefspec.value }}</div>
|
||||
</div>
|
||||
|
||||
<!-- INVENTORY SCM DETAIL -->
|
||||
<div class="JobResults-resultRow" ng-if="!vm.project && vm.inventoryScm">
|
||||
<label class="JobResults-resultRowLabel">{{ vm.inventoryScm.label }}</label>
|
||||
|
||||
@ -78,6 +78,8 @@ function OutputStrings (BaseString) {
|
||||
OVERWRITE_VARS: t.s('Overwrite Vars'),
|
||||
PLAYBOOK: t.s('Playbook'),
|
||||
PROJECT: t.s('Project'),
|
||||
SCM_BRANCH: t.s('Branch'),
|
||||
SCM_REFSPEC: t.s('Refspec'),
|
||||
RESULT_TRACEBACK: t.s('Error Details'),
|
||||
SCM_REVISION: t.s('Revision'),
|
||||
SKIP_TAGS: t.s('Skip Tags'),
|
||||
|
||||
@ -45,6 +45,8 @@ function JobStatusService (moment, message) {
|
||||
id: model.get('summary_fields.project_update.id'),
|
||||
status: model.get('summary_fields.project_update.status')
|
||||
},
|
||||
scmBranch: model.get('scm_branch'),
|
||||
scmRefspec: model.get('scm_refspec'),
|
||||
inventoryScm: {
|
||||
id: model.get('source_project_update'),
|
||||
status: model.get('summary_fields.inventory_source.status')
|
||||
|
||||
@ -66,6 +66,8 @@ function TemplatesStrings (BaseString) {
|
||||
VALID_INTEGER: t.s('Please enter an answer that is a valid integer.'),
|
||||
VALID_DECIMAL: t.s('Please enter an answer that is a decimal number.'),
|
||||
PLAYBOOK_RUN: t.s('Playbook Run'),
|
||||
SCM_BRANCH: t.s('SCM Branch'),
|
||||
SCM_BRANCH_HELP: t.s('Branch to use in job run. Project default used if blank.'),
|
||||
CHECK: t.s('Check'),
|
||||
NO_CREDS_MATCHING_TYPE: t.s('No Credentials Matching This Type Have Been Created'),
|
||||
CREDENTIAL_TYPE_MISSING: typeLabel => t.s('This job template has a default {{typeLabel}} credential which must be included or replaced before proceeding.', { typeLabel })
|
||||
|
||||
@ -115,6 +115,9 @@ function atRelaunchCtrl (
|
||||
},
|
||||
diffMode: {
|
||||
value: populatedJob.diff_mode
|
||||
},
|
||||
scmBranch: {
|
||||
value: populatedJob.scm_branch
|
||||
}
|
||||
},
|
||||
triggerModalOpen: true
|
||||
|
||||
@ -68,6 +68,7 @@ function canLaunchWithoutPrompt () {
|
||||
!launchData.ask_skip_tags_on_launch &&
|
||||
!launchData.ask_variables_on_launch &&
|
||||
!launchData.ask_diff_mode_on_launch &&
|
||||
!launchData.ask_scm_branch_on_launch &&
|
||||
!launchData.survey_enabled &&
|
||||
launchData.variables_needed_to_start.length === 0
|
||||
);
|
||||
|
||||
@ -61,6 +61,7 @@ function canLaunchWithoutPrompt () {
|
||||
!launchData.ask_skip_tags_on_launch &&
|
||||
!launchData.ask_variables_on_launch &&
|
||||
!launchData.ask_diff_mode_on_launch &&
|
||||
!launchData.ask_scm_branch_on_launch &&
|
||||
!launchData.survey_enabled
|
||||
);
|
||||
}
|
||||
|
||||
@ -59,6 +59,7 @@ function canLaunchWithoutPrompt () {
|
||||
!launchData.ask_inventory_on_launch &&
|
||||
!launchData.ask_variables_on_launch &&
|
||||
!launchData.survey_enabled &&
|
||||
!launchData.ask_scm_branch_on_launch &&
|
||||
launchData.variables_needed_to_start.length === 0
|
||||
);
|
||||
}
|
||||
|
||||
@ -61,6 +61,9 @@ export default ['i18n', function(i18n) {
|
||||
AWX_ROLES_ENABLED: {
|
||||
type: 'toggleSwitch',
|
||||
},
|
||||
AWX_COLLECTIONS_ENABLED: {
|
||||
type: 'toggleSwitch',
|
||||
},
|
||||
AWX_TASK_ENV: {
|
||||
type: 'textarea',
|
||||
reset: 'AWX_TASK_ENV',
|
||||
|
||||
@ -128,10 +128,15 @@ export default
|
||||
job_launch_data.extra_credentials.push(extraCredential.id);
|
||||
});
|
||||
}
|
||||
|
||||
if(scope.ask_diff_mode_on_launch && _.has(scope, 'other_prompt_data.diff_mode')){
|
||||
job_launch_data.diff_mode = scope.other_prompt_data.diff_mode;
|
||||
}
|
||||
|
||||
if(scope.ask_scm_branch_on_launch && _.has(scope, 'other_prompt_data.scm_branch')){
|
||||
job_launch_data.scm_branch = scope.other_prompt_data.scm_branch;
|
||||
}
|
||||
|
||||
if(!Empty(scope.relaunchHostType)) {
|
||||
job_launch_data.hosts = scope.relaunchHostType;
|
||||
}
|
||||
|
||||
@ -128,6 +128,7 @@ export default ['$scope', '$location', '$stateParams', 'GenerateForm',
|
||||
$scope.pathRequired = ($scope.scm_type.value === 'manual') ? true : false;
|
||||
$scope.scmRequired = ($scope.scm_type.value !== 'manual') ? true : false;
|
||||
$scope.scmBranchLabel = i18n._('SCM Branch');
|
||||
$scope.scmRefspecLabel = i18n._('SCM Refspec');
|
||||
// Dynamically update popover values
|
||||
if ($scope.scm_type.value) {
|
||||
if(($scope.lookupType === 'insights_credential' && $scope.scm_type.value !== 'insights') || ($scope.lookupType === 'scm_credential' && $scope.scm_type.value === 'insights')) {
|
||||
|
||||
@ -250,6 +250,7 @@ export default ['$scope', '$rootScope', '$stateParams', 'ProjectsForm', 'Rest',
|
||||
$scope.pathRequired = ($scope.scm_type.value === 'manual') ? true : false;
|
||||
$scope.scmRequired = ($scope.scm_type.value !== 'manual') ? true : false;
|
||||
$scope.scmBranchLabel = i18n._('SCM Branch');
|
||||
$scope.scmRefspecLabel = i18n._('SCM Refspec');
|
||||
|
||||
// Dynamically update popover values
|
||||
if ($scope.scm_type.value) {
|
||||
|
||||
@ -125,6 +125,24 @@ export default ['i18n', 'NotificationsList', 'TemplateList',
|
||||
type: 'text',
|
||||
ngShow: "scm_type && scm_type.value !== 'manual' && scm_type.value !== 'insights'",
|
||||
ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)',
|
||||
awPopOver: '<p>' + i18n._("Branch to checkout. In addition to branches, you can input tags, commit hashes, and arbitrary refs. Some commit hashes and refs may not be availble unless you also provide a custom refspec.") + '</p>',
|
||||
dataTitle: i18n._('SCM Branch'),
|
||||
subForm: 'sourceSubForm',
|
||||
},
|
||||
scm_refspec: {
|
||||
labelBind: "scmRefspecLabel",
|
||||
type: 'text',
|
||||
ngShow: "scm_type && scm_type.value === 'git'",
|
||||
ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)',
|
||||
awPopOver: '<p>' + i18n._('A refspec to fetch (passed to the Ansible git module). This parameter allows access to references via the branch field not otherwise available.') + '</p>' +
|
||||
'<p>' + i18n._('NOTE: This field assumes the remote name is "origin".') + '</p>' +
|
||||
'<p>' + i18n._('Examples include:') + '</p>' +
|
||||
'</p><ul class=\"no-bullets\"><li>refs/*:refs/remotes/origin/*</li>' +
|
||||
'<li>refs/pull/62/head:refs/remotes/origin/pull/62/head</li></ul>' +
|
||||
'<p>' + i18n._('The first fetches all references. The second fetches the Github pull request number 62, in this example the branch needs to be `pull/62/head`.') +
|
||||
'</p>' +
|
||||
'<p>' + i18n._('For more information, refer to the') + '<a target="_blank" href="https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#manage-playbooks-using-source-control"> ' + i18n._('Ansible Tower Documentation') + '</a>.</p>',
|
||||
dataTitle: i18n._('SCM Refspec'),
|
||||
subForm: 'sourceSubForm',
|
||||
},
|
||||
credential: {
|
||||
@ -183,6 +201,18 @@ export default ['i18n', 'NotificationsList', 'TemplateList',
|
||||
dataPlacement: 'right',
|
||||
labelClass: 'checkbox-options stack-inline',
|
||||
ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)'
|
||||
},
|
||||
{
|
||||
name: 'allow_override',
|
||||
label: i18n._('Allow branch override'),
|
||||
type: 'checkbox',
|
||||
awPopOver: '<p>' + i18n._('Allow changing the SCM branch or revision in a job template that uses this project.') + '</p>',
|
||||
dataTitle: i18n._('Allow branch override'),
|
||||
dataContainer: 'body',
|
||||
dataPlacement: 'right',
|
||||
labelClass: 'checkbox-options stack-inline',
|
||||
ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)',
|
||||
ngShow: "scm_type && scm_type.value !== 'insights'",
|
||||
}]
|
||||
},
|
||||
scm_update_cache_timeout: {
|
||||
|
||||
@ -20,6 +20,20 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait',
|
||||
scheduler,
|
||||
job_type;
|
||||
|
||||
const shouldShowPromptButton = (launchConf) => launchConf.survey_enabled ||
|
||||
launchConf.ask_inventory_on_launch ||
|
||||
launchConf.ask_credential_on_launch ||
|
||||
launchConf.ask_verbosity_on_launch ||
|
||||
launchConf.ask_job_type_on_launch ||
|
||||
launchConf.ask_limit_on_launch ||
|
||||
launchConf.ask_tags_on_launch ||
|
||||
launchConf.ask_skip_tags_on_launch ||
|
||||
launchConf.ask_diff_mode_on_launch ||
|
||||
launchConf.credential_needed_to_start ||
|
||||
launchConf.ask_variables_on_launch ||
|
||||
launchConf.ask_scm_branch_on_launch ||
|
||||
launchConf.variables_needed_to_start.length !== 0;
|
||||
|
||||
var schedule_url = ParentObject.related.schedules || `${ParentObject.related.inventory_source}schedules`;
|
||||
if (ParentObject){
|
||||
$scope.parentObject = ParentObject;
|
||||
@ -152,19 +166,7 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait',
|
||||
$scope.noVars = true;
|
||||
}
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.survey_enabled &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.inventory_needed_to_start &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
} else {
|
||||
$scope.showPromptButton = true;
|
||||
@ -239,20 +241,8 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait',
|
||||
});
|
||||
};
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.survey_enabled &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.inventory_needed_to_start &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
$scope.showPromptButton = false;
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
} else {
|
||||
$scope.showPromptButton = true;
|
||||
|
||||
|
||||
@ -10,6 +10,21 @@ function($filter, $state, $stateParams, Wait, $scope, moment,
|
||||
|
||||
let schedule, scheduler, scheduleCredentials = [];
|
||||
|
||||
const shouldShowPromptButton = (launchConf) => launchConf.survey_enabled ||
|
||||
launchConf.ask_inventory_on_launch ||
|
||||
launchConf.ask_credential_on_launch ||
|
||||
launchConf.ask_verbosity_on_launch ||
|
||||
launchConf.ask_job_type_on_launch ||
|
||||
launchConf.ask_limit_on_launch ||
|
||||
launchConf.ask_tags_on_launch ||
|
||||
launchConf.ask_skip_tags_on_launch ||
|
||||
launchConf.ask_diff_mode_on_launch ||
|
||||
launchConf.credential_needed_to_start ||
|
||||
launchConf.ask_variables_on_launch ||
|
||||
launchConf.ask_scm_branch_on_launch ||
|
||||
launchConf.passwords_needed_to_start.length !== 0 ||
|
||||
launchConf.variables_needed_to_start.length !== 0;
|
||||
|
||||
$scope.preventCredsWithPasswords = true;
|
||||
|
||||
// initial end @ midnight values
|
||||
@ -326,20 +341,7 @@ function($filter, $state, $stateParams, Wait, $scope, moment,
|
||||
// ask_variables_on_launch = true
|
||||
$scope.noVars = !launchConf.ask_variables_on_launch;
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.survey_enabled &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.inventory_needed_to_start &&
|
||||
launchConf.passwords_needed_to_start.length === 0 &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
|
||||
if (launchConf.ask_variables_on_launch) {
|
||||
@ -424,20 +426,7 @@ function($filter, $state, $stateParams, Wait, $scope, moment,
|
||||
currentValues: scheduleResolve
|
||||
});
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.survey_enabled &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.inventory_needed_to_start &&
|
||||
launchConf.passwords_needed_to_start.length === 0 &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
} else {
|
||||
$scope.showPromptButton = true;
|
||||
|
||||
@ -182,41 +182,52 @@ angular.module('Utilities', ['RestServices', 'Utilities'])
|
||||
}
|
||||
} else if (form) { //if no error code is detected it begins to loop through to see where the api threw an error
|
||||
fieldErrors = false;
|
||||
for (field in form.fields) {
|
||||
if (data[field] && form.fields[field].tab) {
|
||||
|
||||
const addApiErrors = (field, fld) => {
|
||||
if (data[fld] && field.tab) {
|
||||
// If the form is part of a tab group, activate the tab
|
||||
$('#' + form.name + "_tabs a[href=\"#" + form.fields[field].tab + '"]').tab('show');
|
||||
$('#' + form.name + "_tabs a[href=\"#" + field.tab + '"]').tab('show');
|
||||
}
|
||||
if (form.fields[field].realName) {
|
||||
if (data[form.fields[field].realName]) {
|
||||
scope[field + '_api_error'] = data[form.fields[field].realName][0];
|
||||
if (field.realName) {
|
||||
if (field.realName) {
|
||||
scope[fld + '_api_error'] = data[field.realName][0];
|
||||
//scope[form.name + '_form'][form.fields[field].realName].$setValidity('apiError', false);
|
||||
$('[name="' + form.fields[field].realName + '"]').addClass('ng-invalid');
|
||||
$('html, body').animate({scrollTop: $('[name="' + form.fields[field].realName + '"]').offset().top}, 0);
|
||||
$('[name="' + field.realName + '"]').addClass('ng-invalid');
|
||||
$('html, body').animate({scrollTop: $('[name="' + field.realName + '"]').offset().top}, 0);
|
||||
fieldErrors = true;
|
||||
}
|
||||
}
|
||||
if (form.fields[field].sourceModel) {
|
||||
if (data[field]) {
|
||||
scope[form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '_api_error'] =
|
||||
data[field][0];
|
||||
if (field.sourceModel) {
|
||||
if (data[fld]) {
|
||||
scope[field.sourceModel + '_' + field.sourceField + '_api_error'] =
|
||||
data[fld][0];
|
||||
//scope[form.name + '_form'][form.fields[field].sourceModel + '_' + form.fields[field].sourceField].$setValidity('apiError', false);
|
||||
$('[name="' + form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '"]').addClass('ng-invalid');
|
||||
$('[name="' + form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '"]').ScrollTo({ "onlyIfOutside": true, "offsetTop": 100 });
|
||||
$('[name="' + field.sourceModel + '_' + field.sourceField + '"]').addClass('ng-invalid');
|
||||
$('[name="' + field.sourceModel + '_' + field.sourceField + '"]').ScrollTo({ "onlyIfOutside": true, "offsetTop": 100 });
|
||||
fieldErrors = true;
|
||||
}
|
||||
} else {
|
||||
if (data[field]) {
|
||||
scope[field + '_api_error'] = data[field][0];
|
||||
$('[name="' + field + '"]').addClass('ng-invalid');
|
||||
$('label[for="' + field + '"] span').addClass('error-color');
|
||||
$('html, body').animate({scrollTop: $('[name="' + field + '"]').offset().top}, 0);
|
||||
if (data[fld]) {
|
||||
scope[fld + '_api_error'] = data[fld][0];
|
||||
$('[name="' + fld + '"]').addClass('ng-invalid');
|
||||
$('label[for="' + fld + '"] span').addClass('error-color');
|
||||
$('html, body').animate({scrollTop: $('[name="' + fld + '"]').offset().top}, 0);
|
||||
fieldErrors = true;
|
||||
if(form.fields[field].codeMirror){
|
||||
$(`#cm-${field}-container .CodeMirror`).addClass('error-border');
|
||||
if(field.codeMirror){
|
||||
$(`#cm-${fld}-container .CodeMirror`).addClass('error-border');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
for (field in form.fields) {
|
||||
if (form.fields[field].type === "checkbox_group") {
|
||||
form.fields[field].fields.forEach(fld => {
|
||||
addApiErrors(fld, fld.name);
|
||||
});
|
||||
} else {
|
||||
addApiErrors(form.fields[field], field);
|
||||
}
|
||||
}
|
||||
if (defaultMsg) {
|
||||
Alert(defaultMsg.hdr, defaultMsg.msg);
|
||||
|
||||
@ -1156,6 +1156,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
|
||||
field.max + "</div>\n";
|
||||
}
|
||||
html += "<div class=\"error api-error\" id=\"" + this.form.name + "-" + fld + "-api-error\" ng-bind=\"" + fld + "_api_error\"></div>\n";
|
||||
for (i = 0; i < field.fields.length; i++) {
|
||||
html += "<div class=\"error api-error\" id=\"" + this.form.name + "-" + field.fields[i].name + "-api-error\" ng-bind=\"" + field.fields[i].name + "_api_error\"></div>\n";
|
||||
}
|
||||
html += "</div><!-- checkbox-group -->\n";
|
||||
html += "</div>\n";
|
||||
}
|
||||
|
||||
@ -104,7 +104,11 @@
|
||||
});
|
||||
CreateSelect2({
|
||||
element:'#playbook-select',
|
||||
multiple: false
|
||||
addNew: true,
|
||||
multiple: false,
|
||||
scope: $scope,
|
||||
options: 'playbook_options',
|
||||
model: 'playbook'
|
||||
});
|
||||
CreateSelect2({
|
||||
element:'#job_template_verbosity',
|
||||
@ -155,7 +159,11 @@
|
||||
function sync_playbook_select2() {
|
||||
CreateSelect2({
|
||||
element:'#playbook-select',
|
||||
multiple: false
|
||||
addNew: true,
|
||||
multiple: false,
|
||||
scope: $scope,
|
||||
options: 'playbook_options',
|
||||
model: 'playbook'
|
||||
});
|
||||
}
|
||||
|
||||
@ -177,6 +185,9 @@
|
||||
for (i = 0; i < data.length; i++) {
|
||||
opts.push(data[i]);
|
||||
}
|
||||
if ($scope.playbook && opts.indexOf($scope.playbook) === -1) {
|
||||
opts.push($scope.playbook);
|
||||
}
|
||||
$scope.playbook_options = opts;
|
||||
sync_playbook_select2();
|
||||
Wait('stop');
|
||||
@ -195,10 +206,14 @@
|
||||
|
||||
// Detect and alert user to potential SCM status issues
|
||||
checkSCMStatus = function (oldValue, newValue) {
|
||||
if (oldValue !== newValue && !Empty($scope.project)) {
|
||||
if ((oldValue !== newValue || (oldValue === undefined && newValue === undefined)) && !Empty($scope.project)) {
|
||||
Rest.setUrl(GetBasePath('projects') + $scope.project + '/');
|
||||
Rest.get()
|
||||
.then(({data}) => {
|
||||
$scope.allow_branch_override = data.allow_override;
|
||||
$scope.allow_playbook_selection = true;
|
||||
selectPlaybook('force_load');
|
||||
|
||||
var msg;
|
||||
switch (data.status) {
|
||||
case 'failed':
|
||||
@ -219,6 +234,8 @@
|
||||
ProcessErrors($scope, data, status, form, { hdr: 'Error!',
|
||||
msg: 'Failed to get project ' + $scope.project + '. GET returned status: ' + status });
|
||||
});
|
||||
} else {
|
||||
$scope.allow_playbook_selection = false;
|
||||
}
|
||||
};
|
||||
|
||||
@ -295,6 +312,7 @@
|
||||
}
|
||||
data.forks = $scope.forks || 0;
|
||||
data.ask_diff_mode_on_launch = $scope.ask_diff_mode_on_launch ? $scope.ask_diff_mode_on_launch : false;
|
||||
data.ask_scm_branch_on_launch = $scope.ask_scm_branch_on_launch ? $scope.ask_scm_branch_on_launch : false;
|
||||
data.ask_tags_on_launch = $scope.ask_tags_on_launch ? $scope.ask_tags_on_launch : false;
|
||||
data.ask_skip_tags_on_launch = $scope.ask_skip_tags_on_launch ? $scope.ask_skip_tags_on_launch : false;
|
||||
data.ask_limit_on_launch = $scope.ask_limit_on_launch ? $scope.ask_limit_on_launch : false;
|
||||
|
||||
@ -104,7 +104,11 @@ export default
|
||||
playbookNotFound = false;
|
||||
}
|
||||
}
|
||||
if ($scope.playbook && $scope.playbook_options.indexOf($scope.playbook) === -1) {
|
||||
$scope.playbook_options.push($scope.playbook);
|
||||
}
|
||||
$scope.playbookNotFound = playbookNotFound;
|
||||
$scope.allow_playbook_selection = true;
|
||||
sync_playbook_select2();
|
||||
if ($scope.playbook) {
|
||||
jobTemplateLoadFinished();
|
||||
@ -125,6 +129,7 @@ export default
|
||||
Rest.setUrl(GetBasePath('projects') + $scope.project + '/');
|
||||
promises.push(Rest.get()
|
||||
.then(({data}) => {
|
||||
$scope.allow_branch_override = data.allow_override;
|
||||
var msg;
|
||||
switch (data.status) {
|
||||
case 'failed':
|
||||
@ -177,7 +182,11 @@ export default
|
||||
function sync_playbook_select2() {
|
||||
select2LoadDefer.push(CreateSelect2({
|
||||
element:'#playbook-select',
|
||||
multiple: false
|
||||
addNew: true,
|
||||
multiple: false,
|
||||
scope: $scope,
|
||||
options: 'playbook_options',
|
||||
model: 'playbook'
|
||||
}));
|
||||
}
|
||||
|
||||
@ -194,11 +203,6 @@ export default
|
||||
multiple: false
|
||||
}));
|
||||
|
||||
select2LoadDefer.push(CreateSelect2({
|
||||
element:'#playbook-select',
|
||||
multiple: false
|
||||
}));
|
||||
|
||||
select2LoadDefer.push(CreateSelect2({
|
||||
element:'#job_template_job_tags',
|
||||
multiple: true,
|
||||
@ -377,6 +381,9 @@ export default
|
||||
$scope.ask_diff_mode_on_launch = (jobTemplateData.ask_diff_mode_on_launch) ? true : false;
|
||||
master.ask_diff_mode_on_launch = $scope.ask_diff_mode_on_launch;
|
||||
|
||||
$scope.ask_scm_branch_on_launch = (jobTemplateData.ask_scm_branch_on_launch) ? true : false;
|
||||
master.ask_scm_branch_on_launch = $scope.ask_scm_branch_on_launch;
|
||||
|
||||
$scope.job_tag_options = (jobTemplateData.job_tags) ? jobTemplateData.job_tags.split(',')
|
||||
.map((i) => ({name: i, label: i, value: i})) : [];
|
||||
$scope.job_tags = $scope.job_tag_options;
|
||||
@ -652,8 +659,9 @@ export default
|
||||
for(var i=0; i<form.fields[fld].fields.length; i++) {
|
||||
data[form.fields[fld].fields[i].name] = $scope[form.fields[fld].fields[i].name];
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else if (fld === 'scm_branch' && $scope.allow_branch_override) {
|
||||
data[fld] = $scope[fld];
|
||||
} else {
|
||||
if (fld !== 'extra_vars' &&
|
||||
fld !== 'survey' &&
|
||||
fld !== 'forks') {
|
||||
@ -664,6 +672,7 @@ export default
|
||||
|
||||
data.forks = $scope.forks || 0;
|
||||
data.ask_diff_mode_on_launch = $scope.ask_diff_mode_on_launch ? $scope.ask_diff_mode_on_launch : false;
|
||||
data.ask_scm_branch_on_launch = $scope.ask_scm_branch_on_launch && $scope.allow_branch_override ? $scope.ask_scm_branch_on_launch : false;
|
||||
data.ask_tags_on_launch = $scope.ask_tags_on_launch ? $scope.ask_tags_on_launch : false;
|
||||
data.ask_skip_tags_on_launch = $scope.ask_skip_tags_on_launch ? $scope.ask_skip_tags_on_launch : false;
|
||||
data.ask_limit_on_launch = $scope.ask_limit_on_launch ? $scope.ask_limit_on_launch : false;
|
||||
|
||||
@ -103,15 +103,34 @@ function(NotificationsList, i18n) {
|
||||
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate) || !canGetAllRelatedResources',
|
||||
awLookupWhen: 'canGetAllRelatedResources'
|
||||
},
|
||||
scm_branch: {
|
||||
label: i18n._('SCM Branch'),
|
||||
type: 'text',
|
||||
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)',
|
||||
ngShow: 'allow_branch_override',
|
||||
column: 1,
|
||||
awPopOver: "<p>" + i18n._("Branch to use in job run. Project default used if blank.") + "</p>",
|
||||
dataTitle: i18n._('Project'),
|
||||
subCheckbox: {
|
||||
variable: 'ask_scm_branch_on_launch',
|
||||
text: i18n._('Prompt on launch'),
|
||||
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)'
|
||||
},
|
||||
dataPlacement: 'right',
|
||||
dataContainer: "body"
|
||||
},
|
||||
playbook: {
|
||||
label: i18n._('Playbook'),
|
||||
type:'select',
|
||||
defaultText: i18n._('Choose a playbook'),
|
||||
ngOptions: 'book for book in playbook_options track by book',
|
||||
ngShow: 'allow_playbook_selection',
|
||||
ngDisabled: "!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate) || !canGetAllRelatedResources",
|
||||
id: 'playbook-select',
|
||||
required: true,
|
||||
column: 1,
|
||||
awPopOver: "<p>" + i18n._("Select the playbook to be executed by this job.") + "</p>",
|
||||
awPopOver: "<p>" + i18n._("Select the playbook to be executed by this job." +
|
||||
"You can select from the dropdown or enter a file within the input.") + "</p>",
|
||||
dataTitle: i18n._('Playbook'),
|
||||
dataPlacement: 'right',
|
||||
dataContainer: "body",
|
||||
|
||||
@ -157,7 +157,7 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
|
||||
activeTab = activeTab || vm.steps.credential.tab;
|
||||
order++;
|
||||
}
|
||||
if(vm.promptDataClone.launchConf.ask_verbosity_on_launch || vm.promptDataClone.launchConf.ask_job_type_on_launch || vm.promptDataClone.launchConf.ask_limit_on_launch || vm.promptDataClone.launchConf.ask_tags_on_launch || vm.promptDataClone.launchConf.ask_skip_tags_on_launch || (vm.promptDataClone.launchConf.ask_variables_on_launch && !vm.promptDataClone.launchConf.ignore_ask_variables) || vm.promptDataClone.launchConf.ask_diff_mode_on_launch) {
|
||||
if(vm.promptDataClone.launchConf.ask_verbosity_on_launch || vm.promptDataClone.launchConf.ask_job_type_on_launch || vm.promptDataClone.launchConf.ask_limit_on_launch || vm.promptDataClone.launchConf.ask_tags_on_launch || vm.promptDataClone.launchConf.ask_skip_tags_on_launch || (vm.promptDataClone.launchConf.ask_variables_on_launch && !vm.promptDataClone.launchConf.ignore_ask_variables) || vm.promptDataClone.launchConf.ask_diff_mode_on_launch || vm.promptDataClone.launchConf.ask_scm_branch_on_launch) {
|
||||
vm.steps.other_prompts.includeStep = true;
|
||||
vm.steps.other_prompts.tab = {
|
||||
_active: order === 1 ? true : false,
|
||||
|
||||
@ -10,7 +10,8 @@ function PromptService (Empty, $filter) {
|
||||
limit: {},
|
||||
tags: {},
|
||||
skipTags: {},
|
||||
diffMode: {}
|
||||
diffMode: {},
|
||||
scmBranch: {}
|
||||
};
|
||||
|
||||
prompts.credentials.value = _.has(params, 'launchConf.defaults.credentials') ? _.cloneDeep(params.launchConf.defaults.credentials) : [];
|
||||
@ -41,7 +42,7 @@ function PromptService (Empty, $filter) {
|
||||
prompts.tags.value = (jobTags && jobTags !== "") ? jobTags.split(',').map((i) => ({name: i, label: i, value: i})) : [];
|
||||
prompts.skipTags.value = (skipTags && skipTags !== "") ? skipTags.split(',').map((i) => ({name: i, label: i, value: i})) : [];
|
||||
prompts.diffMode.value = _.has(params, 'currentValues.diff_mode') && typeof params.currentValues.diff_mode === 'boolean' ? params.currentValues.diff_mode : (_.has(params, 'launchConf.defaults.diff_mode') ? params.launchConf.defaults.diff_mode : null);
|
||||
|
||||
prompts.scmBranch.value = _.has(params, 'currentValues.scm_branch') && params.currentValues.scm_branch ? params.currentValues.scm_branch : (_.has(params, 'launchConf.defaults.scm_branch') ? params.launchConf.defaults.scm_branch : "");
|
||||
return prompts;
|
||||
};
|
||||
|
||||
@ -163,6 +164,9 @@ function PromptService (Empty, $filter) {
|
||||
if (promptData.launchConf.ask_diff_mode_on_launch && _.has(promptData, 'prompts.diffMode.value')) {
|
||||
launchData.diff_mode = promptData.prompts.diffMode.value;
|
||||
}
|
||||
if (promptData.launchConf.ask_scm_branch_on_launch && _.has(promptData, 'prompts.scmBranch.value')) {
|
||||
launchData.scm_branch = promptData.prompts.scmBranch.value;
|
||||
}
|
||||
if (promptData.prompts.credentials.passwords) {
|
||||
_.forOwn(promptData.prompts.credentials.passwords, (val, key) => {
|
||||
if (!launchData.credential_passwords) {
|
||||
@ -277,7 +281,9 @@ function PromptService (Empty, $filter) {
|
||||
if(_.has(params, 'promptData.prompts.diffMode.value') && _.get(params, 'promptData.launchConf.ask_diff_mode_on_launch')){
|
||||
promptDataToSave.diff_mode = launchConfDefaults.diff_mode && launchConfDefaults.diff_mode === params.promptData.prompts.diffMode.value ? null : params.promptData.prompts.diffMode.value;
|
||||
}
|
||||
|
||||
if(_.has(params, 'promptData.prompts.scmBranch.value') && _.get(params, 'promptData.launchConf.ask_scm_branch_on_launch')){
|
||||
promptDataToSave.scm_branch = launchConfDefaults.scm_branch && launchConfDefaults.scm_branch === params.promptData.prompts.scmBranch.value ? null : params.promptData.prompts.scmBranch.value;
|
||||
}
|
||||
return promptDataToSave;
|
||||
};
|
||||
}
|
||||
|
||||
@ -22,6 +22,22 @@
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group Form-formGroup Form-formGroup--singleColumn" ng-if="promptData.launchConf.ask_scm_branch_on_launch">
|
||||
<label for="scm_branch">
|
||||
<span class="Form-inputLabel">{{:: vm.strings.get('prompt.SCM_BRANCH') }}</span>
|
||||
<a id="awp-scm-branch" href="" aw-pop-over="{{:: vm.strings.get('prompt.SCM_BRANCH_HELP') }}" data-placement="right" data-container="body" over-title="{{:: vm.strings.get('prompt.SCM_BRANCH') }}" class="help-link" data-original-title="" title="" tabindex="-1">
|
||||
<i class="fa fa-question-circle"></i>
|
||||
</a>
|
||||
</label>
|
||||
<div>
|
||||
<input
|
||||
type="text"
|
||||
ng-model="promptData.prompts.scmBranch.value"
|
||||
name="scm_branch"
|
||||
class="form-control Form-textInput"
|
||||
ng-disabled="readOnlyPrompts">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group Form-formGroup Form-formGroup--singleColumn" ng-if="promptData.launchConf.ask_limit_on_launch">
|
||||
<label for="limit">
|
||||
<span class="Form-inputLabel">{{:: vm.strings.get('prompt.LIMIT') }}</span>
|
||||
|
||||
@ -20,6 +20,10 @@
|
||||
<div class="Prompt-previewRowTitle">{{:: vm.strings.get('prompt.INVENTORY') }}</div>
|
||||
<div class="Prompt-previewRowValue" ng-bind="promptData.prompts.inventory.value.name"></div>
|
||||
</div>
|
||||
<div class="Prompt-previewRow--flex" ng-if="promptData.prompts.scmBranch.value">
|
||||
<div class="Prompt-previewRowTitle">{{:: vm.strings.get('prompt.SCM_BRANCH') }}</div>
|
||||
<div class="Prompt-previewRowValue" ng-bind="promptData.prompts.scmBranch.value"></div>
|
||||
</div>
|
||||
<div class="Prompt-previewRow--flex" ng-if="promptData.prompts.limit.value">
|
||||
<div class="Prompt-previewRowTitle">{{:: vm.strings.get('prompt.LIMIT') }}</div>
|
||||
<div class="Prompt-previewRowValue" ng-bind="promptData.prompts.limit.value"></div>
|
||||
|
||||
@ -16,6 +16,20 @@ export default ['$scope', 'TemplatesService', 'JobTemplateModel', 'PromptService
|
||||
|
||||
let promptWatcher, credentialsWatcher, surveyQuestionWatcher, listPromises = [];
|
||||
|
||||
const shouldShowPromptButton = (launchConf) => launchConf.survey_enabled ||
|
||||
launchConf.ask_inventory_on_launch ||
|
||||
launchConf.ask_credential_on_launch ||
|
||||
launchConf.ask_verbosity_on_launch ||
|
||||
launchConf.ask_job_type_on_launch ||
|
||||
launchConf.ask_limit_on_launch ||
|
||||
launchConf.ask_tags_on_launch ||
|
||||
launchConf.ask_skip_tags_on_launch ||
|
||||
launchConf.ask_diff_mode_on_launch ||
|
||||
launchConf.credential_needed_to_start ||
|
||||
launchConf.ask_variables_on_launch ||
|
||||
launchConf.ask_scm_branch_on_launch ||
|
||||
launchConf.variables_needed_to_start.length !== 0;
|
||||
|
||||
$scope.strings = TemplatesStrings;
|
||||
$scope.editNodeHelpMessage = null;
|
||||
|
||||
@ -198,18 +212,7 @@ export default ['$scope', 'TemplatesService', 'JobTemplateModel', 'PromptService
|
||||
$scope.promptData = _.cloneDeep($scope.nodeConfig.node.promptData);
|
||||
const launchConf = $scope.promptData.launchConf;
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.ask_variables_on_launch &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
$scope.promptModalMissingReqFields = false;
|
||||
} else {
|
||||
@ -305,18 +308,7 @@ export default ['$scope', 'TemplatesService', 'JobTemplateModel', 'PromptService
|
||||
|
||||
$scope.credentialRequiresPassword = credentialRequiresPassword;
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.ask_variables_on_launch &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
$scope.promptModalMissingReqFields = false;
|
||||
$scope.nodeFormDataLoaded = true;
|
||||
@ -491,18 +483,7 @@ export default ['$scope', 'TemplatesService', 'JobTemplateModel', 'PromptService
|
||||
$scope.selectedTemplateInvalid = selectedTemplateInvalid;
|
||||
$scope.selectedTemplate = angular.copy(selectedTemplate);
|
||||
|
||||
if (!launchConf.survey_enabled &&
|
||||
!launchConf.ask_inventory_on_launch &&
|
||||
!launchConf.ask_credential_on_launch &&
|
||||
!launchConf.ask_verbosity_on_launch &&
|
||||
!launchConf.ask_job_type_on_launch &&
|
||||
!launchConf.ask_limit_on_launch &&
|
||||
!launchConf.ask_tags_on_launch &&
|
||||
!launchConf.ask_skip_tags_on_launch &&
|
||||
!launchConf.ask_diff_mode_on_launch &&
|
||||
!launchConf.credential_needed_to_start &&
|
||||
!launchConf.ask_variables_on_launch &&
|
||||
launchConf.variables_needed_to_start.length === 0) {
|
||||
if (!shouldShowPromptButton(launchConf)) {
|
||||
$scope.showPromptButton = false;
|
||||
$scope.promptModalMissingReqFields = false;
|
||||
} else {
|
||||
|
||||
@ -145,6 +145,7 @@
|
||||
nodeConfig.node.originalNodeObject.job_tags !== null ||
|
||||
nodeConfig.node.originalNodeObject.skip_tags !== null ||
|
||||
nodeConfig.node.originalNodeObject.diff_mode !== null ||
|
||||
nodeConfig.node.originalNodeObject.scm_branch !== null ||
|
||||
showExtraVars">
|
||||
{{:: strings.get('workflow_maker.READ_ONLY_PROMPT_VALUES')}}
|
||||
</div>
|
||||
@ -158,6 +159,7 @@
|
||||
nodeConfig.node.originalNodeObject.job_tags !== null ||
|
||||
nodeConfig.node.originalNodeObject.skip_tags !== null ||
|
||||
nodeConfig.node.originalNodeObject.diff_mode !== null ||
|
||||
nodeConfig.node.originalNodeObject.scm_branch !== null ||
|
||||
showExtraVars)">
|
||||
{{:: strings.get('workflow_maker.READ_ONLY_NO_PROMPT_VALUES')}}
|
||||
</div>
|
||||
@ -219,6 +221,10 @@
|
||||
<span ng-if="!promptData.prompts.diffMode.value">{{:: strings.get('OFF') }}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="Prompt-previewRow--flex" ng-if="nodeConfig.node.originalNodeObject.scm_branch !== null">
|
||||
<div class="Prompt-previewRowTitle">{{:: strings.get('prompt.SCM_BRANCH') }}</div>
|
||||
<div class="Prompt-previewRowValue" ng-bind="nodeConfig.node.originalNodeObject.scm_branch"></div>
|
||||
</div>
|
||||
<div class="Prompt-previewRow--noflex" ng-show="showExtraVars">
|
||||
<div class="Prompt-previewRowTitle">{{:: strings.get('prompt.EXTRA_VARIABLES') }}</div>
|
||||
<div>
|
||||
|
||||
@ -268,12 +268,13 @@ As Tower instances are brought online, it effectively expands the work capacity
|
||||
|
||||
It's important to note that not all instances are required to be provisioned with an equal capacity.
|
||||
|
||||
Project updates behave differently than they did before. Previously they were ordinary jobs that ran on a single instance. It's now important that they run successfully on any instance that could potentially run a job. Projects will now sync themselves to the correct version on the instance immediately prior to running the job.
|
||||
|
||||
When the sync happens, it is recorded in the database as a project update with a `launch_type` of "sync" and a `job_type` of "run". Project syncs will not change the status or version of the project; instead, they will update the source tree _only_ on the instance where they run. The only exception to this behavior is when the project is in the "never updated" state (meaning that no project updates of any type have been run), in which case a sync should fill in the project's initial revision and status, and subsequent syncs should not make such changes.
|
||||
|
||||
If an Instance Group is configured but all instances in that group are offline or unavailable, any jobs that are launched targeting only that group will be stuck in a waiting state until instances become available. Fallback or backup resources should be provisioned to handle any work that might encounter this scenario.
|
||||
|
||||
#### Project synchronization behavior
|
||||
|
||||
Project updates behave differently than they did before. Previously they were ordinary jobs that ran on a single instance. It's now important that they run successfully on any instance that could potentially run a job. Projects will sync themselves to the correct version on the instance immediately prior to running the job. If the needed revision is already locally checked out and galaxy or collections updates are not needed, then a sync may not be performed.
|
||||
|
||||
When the sync happens, it is recorded in the database as a project update with a `launch_type` of "sync" and a `job_type` of "run". Project syncs will not change the status or version of the project; instead, they will update the source tree _only_ on the instance where they run. The only exception to this behavior is when the project is in the "never updated" state (meaning that no project updates of any type have been run), in which case a sync should fill in the project's initial revision and status, and subsequent syncs should not make such changes.
|
||||
|
||||
#### Controlling where a particular job runs
|
||||
|
||||
|
||||
@ -1,6 +1,62 @@
|
||||
## Collections Support
|
||||
## Collections
|
||||
|
||||
AWX supports Ansible collections by appending the directories specified in `AWX_ANSIBLE_COLLECTIONS_PATHS`
|
||||
AWX supports using Ansible collections.
|
||||
This section will give ways to use collections in job runs.
|
||||
|
||||
### Project Collections Requirements
|
||||
|
||||
If you specify a collections requirements file in SCM at `collections/requirements.yml`,
|
||||
then AWX will install collections in that file in the implicit project sync
|
||||
before a job run. The invocation is:
|
||||
|
||||
```
|
||||
ansible-galaxy collection install -r requirements.yml -p <job tmp location>
|
||||
```
|
||||
|
||||
Example of tmp directory where job is running:
|
||||
|
||||
```
|
||||
├── project
|
||||
│ ├── ansible.cfg
|
||||
│ └── debug.yml
|
||||
├── requirements_collections
|
||||
│ └── ansible_collections
|
||||
│ └── username
|
||||
│ └── collection_name
|
||||
│ ├── FILES.json
|
||||
│ ├── MANIFEST.json
|
||||
│ ├── README.md
|
||||
│ ├── roles
|
||||
│ │ ├── role_in_collection_name
|
||||
│ │ │ ├── defaults
|
||||
│ │ │ │ └── main.yml
|
||||
│ │ │ ├── tasks
|
||||
│ │ │ │ └── main.yml
|
||||
│ │ │ └── templates
|
||||
│ │ │ └── stuff.j2
|
||||
│ └── tests
|
||||
│ └── main.yml
|
||||
├── requirements_roles
|
||||
│ └── username.role_name
|
||||
│ ├── defaults
|
||||
│ │ └── main.yml
|
||||
│ ├── meta
|
||||
│ │ └── main.yml
|
||||
│ ├── README.md
|
||||
│ ├── tasks
|
||||
│ │ ├── main.yml
|
||||
│ │ └── some_role.yml
|
||||
│ ├── templates
|
||||
│ │ └── stuff.j2
|
||||
│ └── vars
|
||||
│ └── Archlinux.yml
|
||||
└── tmp_6wod58k
|
||||
|
||||
```
|
||||
|
||||
### Global Collections Path
|
||||
|
||||
AWX appends the directories specified in `AWX_ANSIBLE_COLLECTIONS_PATHS`
|
||||
to the environment variable `ANSIBLE_COLLECTIONS_PATHS`. The default value of `AWX_ANSIBLE_COLLECTIONS_PATHS`
|
||||
contains `/var/lib/awx/collections`. It is recommended that place your collections that you wish to call in
|
||||
contains `/var/lib/awx/collections`. It is recommended that place your collections that you wish to call in
|
||||
your playbooks into this path.
|
||||
|
||||
105
docs/job_branch_override.md
Normal file
105
docs/job_branch_override.md
Normal file
@ -0,0 +1,105 @@
|
||||
## Job Branch Override
|
||||
|
||||
Background: Projects specify the branch, tag, or reference to use from source control
|
||||
in the `scm_branch` field.
|
||||
|
||||
This "Branch Override" feature allows project admins to delegate branch selection to
|
||||
admins of job templates that use that project (requiring only project
|
||||
`use_role`). Admins of job templates can further
|
||||
delegate that ability to users executing the job template
|
||||
(requiring only job template `execute_role`) by enabling
|
||||
`ask_scm_branch_on_launch` on the job template.
|
||||
|
||||
### Source Tree Copy Behavior
|
||||
|
||||
Background: Every job run has its own private data directory.
|
||||
This folder is temporary, cleaned up at the end of the job run.
|
||||
|
||||
This directory contains a copy of the project source tree for the given
|
||||
`scm_branch` the job is running.
|
||||
|
||||
A new shallow copy is made for every job run.
|
||||
Jobs are free to make changes to the project folder and make use of those
|
||||
changes while it is still running.
|
||||
|
||||
#### Use Cases That No Long Work
|
||||
|
||||
With the introduction of this feature, the function of `scm_clean` is watered
|
||||
down. It will still be possible to enable this function, and it will be
|
||||
passed through as a parameter to the playbook as a tool for trouble shooting.
|
||||
Two notable cases that lose support are documented here.
|
||||
|
||||
1) Setting `scm_clean` to `true` will no longer persist changes between job runs.
|
||||
|
||||
That means that jobs that rely on content which is not committed to source
|
||||
control may fail now.
|
||||
|
||||
2) Because it is a shallow copy, this folder will not contain the full
|
||||
git history for git project types.
|
||||
|
||||
### Project Revision Concerns
|
||||
|
||||
Background of how normal project updates work:
|
||||
The revision of the default branch (specified as `scm_branch` of the project)
|
||||
is stored when updated, and jobs using that project will employ this revision.
|
||||
|
||||
Providing a non-default `scm_branch` in a job comes with some restrictions
|
||||
which are unlike the normal update behavior.
|
||||
If `scm_branch` is a branch identifier (not a commit hash or tag), then
|
||||
the newest revision is pulled from the source control remote immediately
|
||||
before the job starts.
|
||||
This revision is shown in the `scm_revision` field of the
|
||||
job and its respective project update.
|
||||
This means that offline job runs are impossible for non-default branches.
|
||||
To be sure that a job is running a static version from source control,
|
||||
use tags or commit hashes.
|
||||
|
||||
Project updates do not save the revision of all branches, only the
|
||||
project default branch.
|
||||
|
||||
The `scm_branch` field is not validated, so the project must update
|
||||
to assure it is valid.
|
||||
If `scm_branch` is provided or prompted for, the `playbook` field of
|
||||
job templates will not be validated, and users will have to launch
|
||||
the job template in order to verify presence of the expected playbook.
|
||||
|
||||
### Git Refspec
|
||||
|
||||
The field `scm_refspec` has been added to projects. This is provided by
|
||||
the user or left blank.
|
||||
|
||||
A non-blank `scm_refspec` field will cause project updates (of any type)
|
||||
to pass the `refspec` field when running the Ansible
|
||||
git module inside of the `project_update.yml` playbook. When the git module
|
||||
is provided with this field, it performs an extra `git fetch` command
|
||||
to pull that refspec from the remote.
|
||||
|
||||
The refspec specifies what references the update will download from the remote.
|
||||
Examples:
|
||||
|
||||
- `refs/*:refs/remotes/origin/*`
|
||||
This will fetch all references, including even remotes of the remote
|
||||
- `refs/pull/*:refs/remotes/origin/pull/*`
|
||||
GitHub-specific, this will fetch all refs for all pull requests
|
||||
- `refs/pull/62/head:refs/remotes/origin/pull/62/head`
|
||||
This will fetch the ref for that one GitHub pull request
|
||||
|
||||
For large projects, users should consider performance when
|
||||
using the first or second examples here.
|
||||
|
||||
This parameter affects availability of the project branch, and can allow
|
||||
access to references not otherwise available. For example, the third example
|
||||
will allow the user to supply `pull/62/head` for `scm_branch`, which would
|
||||
not be possible without the refspec field.
|
||||
|
||||
The Ansible git module always fetches `refs/heads/*`. It will do this
|
||||
whether or not a custom refspec is provided. This means that a project's
|
||||
branches and tags (and commit hashes therein) can be used as `scm_branch`
|
||||
no matter what is used for `scm_refspec`.
|
||||
|
||||
The `scm_refspec` will affect which `scm_branch` fields can be used as overrides.
|
||||
For example, you could set up a project that allows branch override with the
|
||||
1st or 2nd refspec example, then use this in a job template
|
||||
that prompts for `scm_branch`, then a client could launch the job template when
|
||||
a new pull request is created, providing the branch `pull/N/head`,
|
||||
then the job template would run against the provided GitHub pull request reference.
|
||||
30
docs/licenses/GitPython.txt
Normal file
30
docs/licenses/GitPython.txt
Normal file
@ -0,0 +1,30 @@
|
||||
Copyright (C) 2008, 2009 Michael Trier and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the GitPython project nor the names of
|
||||
its contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
42
docs/licenses/gitdb2.txt
Normal file
42
docs/licenses/gitdb2.txt
Normal file
@ -0,0 +1,42 @@
|
||||
Copyright (C) 2010, 2011 Sebastian Thiel and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the GitDB project nor the names of
|
||||
its contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
Additional Licenses
|
||||
-------------------
|
||||
The files at
|
||||
gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx
|
||||
and
|
||||
gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack
|
||||
are licensed under GNU GPL as part of the git source repository,
|
||||
see http://en.wikipedia.org/wiki/Git_%28software%29 for more information.
|
||||
|
||||
They are not required for the actual operation, which is why they are not found
|
||||
in the distribution package.
|
||||
30
docs/licenses/smmap2.txt
Normal file
30
docs/licenses/smmap2.txt
Normal file
@ -0,0 +1,30 @@
|
||||
Copyright (C) 2010, 2011 Sebastian Thiel and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the async project nor the names of
|
||||
its contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
@ -11,7 +11,7 @@ Tower 3.5 forward uses the process isolation feature in ansible runner to achiev
|
||||
By default `bubblewrap` is enabled, this can be turned off via Tower Config or from a tower settings file:
|
||||
|
||||
AWX_PROOT_ENABLED = False
|
||||
|
||||
|
||||
Process isolation, when enabled, will be used for the following Job Types:
|
||||
|
||||
* Job Templates - Launching jobs from regular job templates
|
||||
@ -30,11 +30,18 @@ If there is other information on the system that is sensitive and should be hidd
|
||||
or by updating the following entry in a tower settings file:
|
||||
|
||||
AWX_PROOT_HIDE_PATHS = ['/list/of/', '/paths']
|
||||
|
||||
|
||||
If there are any directories that should specifically be exposed that can be set in a similar way:
|
||||
|
||||
AWX_PROOT_SHOW_PATHS = ['/list/of/', '/paths']
|
||||
|
||||
|
||||
By default the system will use the system's tmp dir (/tmp by default) as its staging area. This can be changed:
|
||||
|
||||
AWX_PROOT_BASE_PATH = "/opt/tmp"
|
||||
|
||||
### Project Folder Isolation
|
||||
|
||||
Starting in AWX versions above 6.0.0, the project folder will be copied for each job run.
|
||||
This allows playbooks to make local changes to the source tree for convenience,
|
||||
such as creating temporary files, without the possibility of interference with
|
||||
other jobs.
|
||||
|
||||
@ -20,6 +20,7 @@ The standard pattern applies to fields
|
||||
- `limit`
|
||||
- `diff_mode`
|
||||
- `verbosity`
|
||||
- `scm_branch`
|
||||
|
||||
##### Non-Standard Cases
|
||||
|
||||
|
||||
@ -22,6 +22,7 @@ django-split-settings==0.3.0
|
||||
django-taggit==0.22.2
|
||||
djangorestframework==3.9.4
|
||||
djangorestframework-yaml==1.0.3
|
||||
GitPython==2.1.11
|
||||
irc==16.2
|
||||
jinja2==2.10.1
|
||||
jsonschema==2.6.0
|
||||
|
||||
@ -40,6 +40,8 @@ djangorestframework-yaml==1.0.3
|
||||
djangorestframework==3.9.4
|
||||
|
||||
future==0.16.0 # via django-radius
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
gitpython==2.1.11
|
||||
hyperlink==19.0.0 # via twisted
|
||||
idna==2.8 # via hyperlink, requests, twisted
|
||||
incremental==17.5.0 # via twisted
|
||||
@ -100,6 +102,7 @@ service-identity==18.1.0 # via twisted
|
||||
simplejson==3.16.0 # via uwsgitop
|
||||
six==1.12.0 # via ansible-runner, asgi-amqp, asgiref, autobahn, automat, cryptography, django-extensions, irc, isodate, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, pygerduty, pyhamcrest, pyopenssl, pyrad, python-dateutil, python-memcached, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, tempora, twilio, txaio, websocket-client
|
||||
slackclient==1.1.2
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
social-auth-app-django==2.1.0
|
||||
social-auth-core==3.0.0
|
||||
sqlparse==0.3.0 # via django
|
||||
|
||||
@ -2,6 +2,9 @@ FROM centos:7
|
||||
|
||||
ARG UID=0
|
||||
|
||||
# Add ansible-devel so that we get collections!
|
||||
ADD tools/docker-compose/ansible_nightly.repo /etc/yum.repos.d/ansible_nightly.repo
|
||||
|
||||
RUN yum -y update && yum -y install epel-release && yum -y install https://centos7.iuscommunity.org/ius-release.rpm
|
||||
|
||||
# sync with installer/roles/image_build/templates/Dockerfile.j2
|
||||
|
||||
4
tools/docker-compose/ansible_nightly.repo
Normal file
4
tools/docker-compose/ansible_nightly.repo
Normal file
@ -0,0 +1,4 @@
|
||||
[ansible-nightly]
|
||||
baseurl=https://releases.ansible.com/ansible/rpm/nightly/devel/epel-7-$basearch
|
||||
gpgcheck=0
|
||||
enabled=1
|
||||
Loading…
x
Reference in New Issue
Block a user