Allow JTs to specify and prompt for SCM branch

Copy project folder each job run
  change cwd to private_data_dir, from proj
  do not add cwd to show_paths if it is
  a subdirectory of private_data_dir, which
  is already shown

Pass the job private_data_dir to the local
  project sync, and also add that directory
  to the project sync show paths

Add GitPython dep and use for job sync logic
  use this to manage shallow clone from desired
  commit, and to map branch to commit,
  and to assess necessity of project sync

Start on some validation change, but not all
  allow arbitrary playbooks with custom branch
This commit is contained in:
AlanCoding
2019-06-04 15:26:14 -04:00
parent 28e3625066
commit ac86dc4fb9
12 changed files with 367 additions and 93 deletions

View File

@@ -1338,7 +1338,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
class Meta: class Meta:
model = Project model = Project
fields = ('*', 'organization', 'scm_update_on_launch', fields = ('*', 'organization', 'scm_update_on_launch',
'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \ 'scm_update_cache_timeout', 'scm_revision', 'allow_override', 'custom_virtualenv',) + \
('last_update_failed', 'last_updated') # Backwards compatibility ('last_update_failed', 'last_updated') # Backwards compatibility
def get_related(self, obj): def get_related(self, obj):
@@ -2701,7 +2701,7 @@ class LabelsListMixin(object):
class JobOptionsSerializer(LabelsListMixin, BaseSerializer): class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
class Meta: class Meta:
fields = ('*', 'job_type', 'inventory', 'project', 'playbook', fields = ('*', 'job_type', 'inventory', 'project', 'playbook', 'scm_branch',
'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags', 'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags',
'force_handlers', 'skip_tags', 'start_at_task', 'timeout', 'force_handlers', 'skip_tags', 'start_at_task', 'timeout',
'use_fact_cache',) 'use_fact_cache',)
@@ -2752,9 +2752,14 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '') playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
if not project: if not project:
raise serializers.ValidationError({'project': _('This field is required.')}) raise serializers.ValidationError({'project': _('This field is required.')})
if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files: playbook_not_found = bool(
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) (
if project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks: project and project.scm_type and (not project.allow_override) and
playbook and force_text(playbook) not in project.playbook_files
) or
(project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual
)
if playbook_not_found:
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
if project and not playbook: if project and not playbook:
raise serializers.ValidationError({'playbook': _('Must select playbook for project.')}) raise serializers.ValidationError({'playbook': _('Must select playbook for project.')})
@@ -2799,7 +2804,8 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
class Meta: class Meta:
model = JobTemplate model = JobTemplate
fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
'ask_limit_on_launch', 'ask_tags_on_launch',
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode', 'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
'allow_simultaneous', 'custom_virtualenv', 'job_slice_count') 'allow_simultaneous', 'custom_virtualenv', 'job_slice_count')
@@ -3365,6 +3371,7 @@ class WorkflowJobCancelSerializer(WorkflowJobSerializer):
class LaunchConfigurationBaseSerializer(BaseSerializer): class LaunchConfigurationBaseSerializer(BaseSerializer):
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None, job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None,
choices=NEW_JOB_TYPE_CHOICES) choices=NEW_JOB_TYPE_CHOICES)
job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
@@ -3377,7 +3384,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
class Meta: class Meta:
fields = ('*', 'extra_data', 'inventory', # Saved launch-time config fields fields = ('*', 'extra_data', 'inventory', # Saved launch-time config fields
'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity') 'scm_branch', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')
def get_related(self, obj): def get_related(self, obj):
res = super(LaunchConfigurationBaseSerializer, self).get_related(obj) res = super(LaunchConfigurationBaseSerializer, self).get_related(obj)
@@ -3960,6 +3967,7 @@ class JobLaunchSerializer(BaseSerializer):
required=False, write_only=True required=False, write_only=True
) )
credential_passwords = VerbatimField(required=False, write_only=True) credential_passwords = VerbatimField(required=False, write_only=True)
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
diff_mode = serializers.BooleanField(required=False, write_only=True) diff_mode = serializers.BooleanField(required=False, write_only=True)
job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True) job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True) job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True)
@@ -3970,13 +3978,15 @@ class JobLaunchSerializer(BaseSerializer):
class Meta: class Meta:
model = JobTemplate model = JobTemplate
fields = ('can_start_without_user_input', 'passwords_needed_to_start', fields = ('can_start_without_user_input', 'passwords_needed_to_start',
'extra_vars', 'inventory', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode', 'extra_vars', 'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
'credentials', 'credential_passwords', 'ask_variables_on_launch', 'ask_tags_on_launch', 'credentials', 'credential_passwords',
'ask_scm_branch_on_launch', 'ask_variables_on_launch', 'ask_tags_on_launch',
'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch', 'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch',
'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start', 'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start',
'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity') 'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity')
read_only_fields = ( read_only_fields = (
'ask_scm_branch_on_launch',
'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
'ask_inventory_on_launch', 'ask_credential_on_launch',) 'ask_inventory_on_launch', 'ask_credential_on_launch',)

View File

@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-06-14 15:08
from __future__ import unicode_literals
import awx.main.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0081_v360_notify_on_start'),
]
operations = [
migrations.AddField(
model_name='job',
name='scm_branch',
field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
),
migrations.AddField(
model_name='jobtemplate',
name='ask_scm_branch_on_launch',
field=awx.main.fields.AskForField(default=False),
),
migrations.AddField(
model_name='jobtemplate',
name='scm_branch',
field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
),
migrations.AddField(
model_name='project',
name='allow_override',
field=models.BooleanField(default=False, help_text='Allow changing the SCM branch or revision in a job template that uses this project.'),
),
migrations.AlterField(
model_name='project',
name='scm_update_cache_timeout',
field=models.PositiveIntegerField(blank=True, default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
),
]

View File

@@ -96,6 +96,13 @@ class JobOptions(BaseModel):
default='', default='',
blank=True, blank=True,
) )
scm_branch = models.CharField(
max_length=1024,
default='',
blank=True,
help_text=_('Branch to use in job run. Project default used if blank. '
'Only allowed if project allow_override field is set to true.'),
)
forks = models.PositiveIntegerField( forks = models.PositiveIntegerField(
blank=True, blank=True,
default=0, default=0,
@@ -234,6 +241,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
default=False, default=False,
allows_field='credentials' allows_field='credentials'
) )
ask_scm_branch_on_launch = AskForField(
blank=True,
default=False,
allows_field='scm_branch'
)
job_slice_count = models.PositiveIntegerField( job_slice_count = models.PositiveIntegerField(
blank=True, blank=True,
default=1, default=1,

View File

@@ -261,9 +261,14 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
scm_update_cache_timeout = models.PositiveIntegerField( scm_update_cache_timeout = models.PositiveIntegerField(
default=0, default=0,
blank=True, blank=True,
help_text=_('The number of seconds after the last project update ran that a new' help_text=_('The number of seconds after the last project update ran that a new '
'project update will be launched as a job dependency.'), 'project update will be launched as a job dependency.'),
) )
allow_override = models.BooleanField(
default=False,
help_text=_('Allow changing the SCM branch or revision in a job template '
'that uses this project.'),
)
scm_revision = models.CharField( scm_revision = models.CharField(
max_length=1024, max_length=1024,

View File

@@ -20,6 +20,8 @@ from distutils.dir_util import copy_tree
from distutils.version import LooseVersion as Version from distutils.version import LooseVersion as Version
import yaml import yaml
import fcntl import fcntl
from pathlib import Path
from uuid import uuid4
try: try:
import psutil import psutil
except Exception: except Exception:
@@ -41,6 +43,10 @@ from django.core.exceptions import ObjectDoesNotExist
# Django-CRUM # Django-CRUM
from crum import impersonate from crum import impersonate
# GitPython
import git
from gitdb.exc import BadName as BadGitName
# Runner # Runner
import ansible_runner import ansible_runner
@@ -694,9 +700,12 @@ class BaseTask(object):
model = None model = None
event_model = None event_model = None
abstract = True abstract = True
cleanup_paths = []
proot_show_paths = [] proot_show_paths = []
def __init__(self, *args, **kwargs):
super(BaseTask, self).__init__(*args, **kwargs)
self.cleanup_paths = []
def update_model(self, pk, _attempt=0, **updates): def update_model(self, pk, _attempt=0, **updates):
"""Reload the model instance from the database and update the """Reload the model instance from the database and update the
given fields. given fields.
@@ -769,9 +778,11 @@ class BaseTask(object):
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
if settings.AWX_CLEANUP_PATHS: if settings.AWX_CLEANUP_PATHS:
self.cleanup_paths.append(path) self.cleanup_paths.append(path)
# Ansible Runner requires that this directory exists. runner_project_folder = os.path.join(path, 'project')
# Specifically, when using process isolation if not os.path.exists(runner_project_folder):
os.mkdir(os.path.join(path, 'project')) # Ansible Runner requires that this directory exists.
# Specifically, when using process isolation
os.mkdir(runner_project_folder)
return path return path
def build_private_data_files(self, instance, private_data_dir): def build_private_data_files(self, instance, private_data_dir):
@@ -860,7 +871,10 @@ class BaseTask(object):
''' '''
process_isolation_params = dict() process_isolation_params = dict()
if self.should_use_proot(instance): if self.should_use_proot(instance):
show_paths = self.proot_show_paths + [private_data_dir, cwd] + \ local_paths = [private_data_dir]
if cwd != private_data_dir and Path(private_data_dir) not in Path(cwd).parents:
local_paths.append(cwd)
show_paths = self.proot_show_paths + local_paths + \
settings.AWX_PROOT_SHOW_PATHS settings.AWX_PROOT_SHOW_PATHS
# Help the user out by including the collections path inside the bubblewrap environment # Help the user out by including the collections path inside the bubblewrap environment
@@ -1030,7 +1044,7 @@ class BaseTask(object):
expect_passwords[k] = passwords.get(v, '') or '' expect_passwords[k] = passwords.get(v, '') or ''
return expect_passwords return expect_passwords
def pre_run_hook(self, instance): def pre_run_hook(self, instance, private_data_dir):
''' '''
Hook for any steps to run before the job/task starts Hook for any steps to run before the job/task starts
''' '''
@@ -1157,7 +1171,8 @@ class BaseTask(object):
try: try:
isolated = self.instance.is_isolated() isolated = self.instance.is_isolated()
self.instance.send_notification_templates("running") self.instance.send_notification_templates("running")
self.pre_run_hook(self.instance) private_data_dir = self.build_private_data_dir(self.instance)
self.pre_run_hook(self.instance, private_data_dir)
if self.instance.cancel_flag: if self.instance.cancel_flag:
self.instance = self.update_model(self.instance.pk, status='canceled') self.instance = self.update_model(self.instance.pk, status='canceled')
if self.instance.status != 'running': if self.instance.status != 'running':
@@ -1173,7 +1188,6 @@ class BaseTask(object):
# store a record of the venv used at runtime # store a record of the venv used at runtime
if hasattr(self.instance, 'custom_virtualenv'): if hasattr(self.instance, 'custom_virtualenv'):
self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH)) self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH))
private_data_dir = self.build_private_data_dir(self.instance)
# Fetch "cached" fact data from prior runs and put on the disk # Fetch "cached" fact data from prior runs and put on the disk
# where ansible expects to find it # where ansible expects to find it
@@ -1256,9 +1270,6 @@ class BaseTask(object):
module_args = ansible_runner.utils.args2cmdline( module_args = ansible_runner.utils.args2cmdline(
params.get('module_args'), params.get('module_args'),
) )
else:
# otherwise, it's a playbook, so copy the project dir
copy_tree(cwd, os.path.join(private_data_dir, 'project'))
shutil.move( shutil.move(
params.pop('inventory'), params.pop('inventory'),
os.path.join(private_data_dir, 'inventory') os.path.join(private_data_dir, 'inventory')
@@ -1532,15 +1543,10 @@ class RunJob(BaseTask):
return args return args
def build_cwd(self, job, private_data_dir): def build_cwd(self, job, private_data_dir):
cwd = job.project.get_project_path() return os.path.join(private_data_dir, 'project')
if not cwd:
root = settings.PROJECTS_ROOT
raise RuntimeError('project local_path %s cannot be found in %s' %
(job.project.local_path, root))
return cwd
def build_playbook_path_relative_to_cwd(self, job, private_data_dir): def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
return os.path.join(job.playbook) return job.playbook
def build_extra_vars_file(self, job, private_data_dir): def build_extra_vars_file(self, job, private_data_dir):
# Define special extra_vars for AWX, combine with job.extra_vars. # Define special extra_vars for AWX, combine with job.extra_vars.
@@ -1587,39 +1593,117 @@ class RunJob(BaseTask):
''' '''
return getattr(settings, 'AWX_PROOT_ENABLED', False) return getattr(settings, 'AWX_PROOT_ENABLED', False)
def pre_run_hook(self, job): def copy_folders(self, project_path, galaxy_install_path, private_data_dir):
if project_path is None:
raise RuntimeError('project does not supply a valid path')
elif not os.path.exists(project_path):
raise RuntimeError('project path %s cannot be found' % project_path)
runner_project_folder = os.path.join(private_data_dir, 'project')
copy_tree(project_path, runner_project_folder)
if galaxy_install_path:
galaxy_run_path = os.path.join(private_data_dir, 'project', 'roles')
copy_tree(galaxy_install_path, galaxy_run_path)
def pre_run_hook(self, job, private_data_dir):
if job.inventory is None: if job.inventory is None:
error = _('Job could not start because it does not have a valid inventory.') error = _('Job could not start because it does not have a valid inventory.')
self.update_model(job.pk, status='failed', job_explanation=error) self.update_model(job.pk, status='failed', job_explanation=error)
raise RuntimeError(error) raise RuntimeError(error)
if job.project and job.project.scm_type: elif job.project is None:
error = _('Job could not start because it does not have a valid project.')
self.update_model(job.pk, status='failed', job_explanation=error)
raise RuntimeError(error)
elif job.project.status in ('error', 'failed'):
msg = _(
'The project revision for this job template is unknown due to a failed update.'
)
job = self.update_model(job.pk, status='failed', job_explanation=msg)
raise RuntimeError(msg)
galaxy_install_path = None
git_repo = None
project_path = job.project.get_project_path(check_if_exists=False)
job_revision = job.project.scm_revision
needs_sync = True
if not job.project.scm_type:
# manual projects are not synced, user has responsibility for that
needs_sync = False
elif not os.path.exists(project_path):
logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
needs_sync = True
elif job.project.scm_type == 'git':
git_repo = git.Repo(project_path)
if job.scm_branch and job.scm_branch != job.project.scm_branch and git_repo:
try:
commit = git_repo.commit(job.scm_branch)
job_revision = commit.hexsha
logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
needs_sync = False # requested commit is already locally available
except (ValueError, BadGitName):
pass
else:
if git_repo.head.commit.hexsha == job.project.scm_revision:
logger.info('Source tree for for {} is already up to date'.format(job.log_format))
needs_sync = False
# Galaxy requirements are not supported for manual projects
if not needs_sync and job.project.scm_type:
# see if we need a sync because of presence of roles
galaxy_req_path = os.path.join(project_path, 'roles', 'requirements.yml')
if os.path.exists(galaxy_req_path):
logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format))
needs_sync = True
if needs_sync:
pu_ig = job.instance_group pu_ig = job.instance_group
pu_en = job.execution_node pu_en = job.execution_node
if job.is_isolated() is True: if job.is_isolated() is True:
pu_ig = pu_ig.controller pu_ig = pu_ig.controller
pu_en = settings.CLUSTER_HOST_ID pu_en = settings.CLUSTER_HOST_ID
if job.project.status in ('error', 'failed'): sync_metafields = dict(
msg = _( launch_type="sync",
'The project revision for this job template is unknown due to a failed update.' job_type='run',
) status='running',
job = self.update_model(job.pk, status='failed', job_explanation=msg) instance_group = pu_ig,
raise RuntimeError(msg) execution_node=pu_en,
local_project_sync = job.project.create_project_update( celery_task_id=job.celery_task_id
_eager_fields=dict( )
launch_type="sync", if job.scm_branch and job.scm_branch != job.project.scm_branch:
job_type='run', sync_metafields['scm_branch'] = job.scm_branch
status='running', local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields)
instance_group = pu_ig,
execution_node=pu_en,
celery_task_id=job.celery_task_id))
# save the associated job before calling run() so that a # save the associated job before calling run() so that a
# cancel() call on the job can cancel the project update # cancel() call on the job can cancel the project update
job = self.update_model(job.pk, project_update=local_project_sync) job = self.update_model(job.pk, project_update=local_project_sync)
# Save the roles from galaxy to a temporary directory to be moved later
# at this point, the project folder has not yet been copied into the temporary directory
galaxy_install_path = tempfile.mkdtemp(prefix='tmp_roles_', dir=private_data_dir)
os.chmod(galaxy_install_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
project_update_task = local_project_sync._get_task_class() project_update_task = local_project_sync._get_task_class()
try: try:
project_update_task().run(local_project_sync.id) sync_task = project_update_task(roles_destination=galaxy_install_path)
job = self.update_model(job.pk, scm_revision=job.project.scm_revision) sync_task.run(local_project_sync.id)
# if the job overrode the branch, we need to find the revision that will be run
if job.scm_branch and job.scm_branch != job.project.scm_branch:
# TODO: handle case of non-git
if job.project.scm_type == 'git':
git_repo = git.Repo(project_path)
try:
commit = git_repo.commit(job.scm_branch)
job_revision = commit.hexsha
logger.debug('Evaluated {} to be a valid commit for {}'.format(job.scm_branch, job.log_format))
except (ValueError, BadGitName):
# not a commit, see if it is a ref
try:
user_branch = getattr(git_repo.refs, job.scm_branch)
job_revision = user_branch.commit.hexsha
logger.debug('Evaluated {} to be a valid ref for {}'.format(job.scm_branch, job.log_format))
except git.exc.NoSuchPathError as exc:
raise RuntimeError('Could not find specified version {}, error: {}'.format(
job.scm_branch, exc
))
else:
job_revision = sync_task.updated_revision
job = self.update_model(job.pk, scm_revision=job_revision)
except Exception: except Exception:
local_project_sync.refresh_from_db() local_project_sync.refresh_from_db()
if local_project_sync.status != 'canceled': if local_project_sync.status != 'canceled':
@@ -1627,6 +1711,31 @@ class RunJob(BaseTask):
job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
('project_update', local_project_sync.name, local_project_sync.id))) ('project_update', local_project_sync.name, local_project_sync.id)))
raise raise
else:
# Case where a local sync is not needed, meaning that local tree is
# up-to-date with project, job is running project current version
if job_revision:
job = self.update_model(job.pk, scm_revision=job_revision)
# copy the project directory
runner_project_folder = os.path.join(private_data_dir, 'project')
if job.project.scm_type == 'git':
git_repo = git.Repo(project_path)
if not os.path.exists(runner_project_folder):
os.mkdir(runner_project_folder)
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
# always clone based on specific job revision
source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision)
git_repo.clone(runner_project_folder, branch=source_branch, depth=1, single_branch=True)
# force option is necessary because remote refs are not counted, although no information is lost
git_repo.delete_head(tmp_branch_name, force=True)
else:
copy_tree(project_path, runner_project_folder)
if galaxy_install_path and os.listdir(galaxy_install_path):
logger.debug('Copying galaxy roles for {} to tmp directory'.format(job.log_format))
galaxy_run_path = os.path.join(private_data_dir, 'project', 'roles')
copy_tree(galaxy_install_path, galaxy_run_path)
if job.inventory.kind == 'smart': if job.inventory.kind == 'smart':
# cache smart inventory memberships so that the host_filter query is not # cache smart inventory memberships so that the host_filter query is not
# ran inside of the event saving code # ran inside of the event saving code
@@ -1663,7 +1772,23 @@ class RunProjectUpdate(BaseTask):
@property @property
def proot_show_paths(self): def proot_show_paths(self):
return [settings.PROJECTS_ROOT] show_paths = [settings.PROJECTS_ROOT]
if self.roles_destination:
show_paths.append(self.roles_destination)
return show_paths
def __init__(self, *args, roles_destination=None, **kwargs):
super(RunProjectUpdate, self).__init__(*args, **kwargs)
self.updated_revision = None
self.roles_destination = roles_destination
def event_handler(self, event_data):
super(RunProjectUpdate, self).event_handler(event_data)
returned_data = event_data.get('event_data', {})
if returned_data.get('task_action', '') == 'set_fact':
returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
if 'scm_version' in returned_facts:
self.updated_revision = returned_facts['scm_version']
def build_private_data(self, project_update, private_data_dir): def build_private_data(self, project_update, private_data_dir):
''' '''
@@ -1678,9 +1803,6 @@ class RunProjectUpdate(BaseTask):
} }
} }
''' '''
handle, self.revision_path = tempfile.mkstemp(dir=settings.PROJECTS_ROOT)
if settings.AWX_CLEANUP_PATHS:
self.cleanup_paths.append(self.revision_path)
private_data = {'credentials': {}} private_data = {'credentials': {}}
if project_update.credential: if project_update.credential:
credential = project_update.credential credential = project_update.credential
@@ -1781,7 +1903,7 @@ class RunProjectUpdate(BaseTask):
scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update) scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update)
extra_vars.update(extra_vars_new) extra_vars.update(extra_vars_new)
if project_update.project.scm_revision and project_update.job_type == 'run': if project_update.project.scm_revision and project_update.job_type == 'run' and not project_update.project.allow_override:
scm_branch = project_update.project.scm_revision scm_branch = project_update.project.scm_revision
else: else:
scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD') scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
@@ -1796,17 +1918,21 @@ class RunProjectUpdate(BaseTask):
'scm_clean': project_update.scm_clean, 'scm_clean': project_update.scm_clean,
'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False, 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,
'scm_full_checkout': True if project_update.job_type == 'run' else False, 'scm_full_checkout': True if project_update.job_type == 'run' else False,
'scm_revision_output': self.revision_path,
'scm_revision': project_update.project.scm_revision, 'scm_revision': project_update.project.scm_revision,
'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True) 'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True) if project_update.job_type != 'check' else False
}) })
if project_update.project.allow_override:
# If branch is override-able, do extra fetch for all branches
# coming feature TODO: obtain custom refspec from user for PR refs and the like
extra_vars['git_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
if self.roles_destination:
extra_vars['roles_destination'] = self.roles_destination
self._write_extra_vars_file(private_data_dir, extra_vars) self._write_extra_vars_file(private_data_dir, extra_vars)
def build_cwd(self, project_update, private_data_dir): def build_cwd(self, project_update, private_data_dir):
return self.get_path_to('..', 'playbooks') return self.get_path_to('..', 'playbooks')
def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir): def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir):
self.build_cwd(project_update, private_data_dir)
return os.path.join('project_update.yml') return os.path.join('project_update.yml')
def get_password_prompts(self, passwords={}): def get_password_prompts(self, passwords={}):
@@ -1920,7 +2046,7 @@ class RunProjectUpdate(BaseTask):
'{} spent {} waiting to acquire lock for local source tree ' '{} spent {} waiting to acquire lock for local source tree '
'for path {}.'.format(instance.log_format, waiting_time, lock_path)) 'for path {}.'.format(instance.log_format, waiting_time, lock_path))
def pre_run_hook(self, instance): def pre_run_hook(self, instance, private_data_dir):
# re-create root project folder if a natural disaster has destroyed it # re-create root project folder if a natural disaster has destroyed it
if not os.path.exists(settings.PROJECTS_ROOT): if not os.path.exists(settings.PROJECTS_ROOT):
os.mkdir(settings.PROJECTS_ROOT) os.mkdir(settings.PROJECTS_ROOT)
@@ -1930,10 +2056,8 @@ class RunProjectUpdate(BaseTask):
self.release_lock(instance) self.release_lock(instance)
p = instance.project p = instance.project
if instance.job_type == 'check' and status not in ('failed', 'canceled',): if instance.job_type == 'check' and status not in ('failed', 'canceled',):
fd = open(self.revision_path, 'r') if self.updated_revision:
lines = fd.readlines() p.scm_revision = self.updated_revision
if lines:
p.scm_revision = lines[0].strip()
else: else:
logger.info("{} Could not find scm revision in check".format(instance.log_format)) logger.info("{} Could not find scm revision in check".format(instance.log_format))
p.playbook_files = p.playbooks p.playbook_files = p.playbooks
@@ -2159,11 +2283,12 @@ class RunInventoryUpdate(BaseTask):
# All credentials not used by inventory source injector # All credentials not used by inventory source injector
return inventory_update.get_extra_credentials() return inventory_update.get_extra_credentials()
def pre_run_hook(self, inventory_update): def pre_run_hook(self, inventory_update, private_data_dir):
source_project = None source_project = None
if inventory_update.inventory_source: if inventory_update.inventory_source:
source_project = inventory_update.inventory_source.source_project source_project = inventory_update.inventory_source.source_project
if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and source_project): if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and source_project):
# In project sync, pulling galaxy roles is not needed
local_project_sync = source_project.create_project_update( local_project_sync = source_project.create_project_update(
_eager_fields=dict( _eager_fields=dict(
launch_type="sync", launch_type="sync",

View File

@@ -361,12 +361,13 @@ class TestExtraVarSanitation(TestJobExecution):
class TestGenericRun(): class TestGenericRun():
def test_generic_failure(self, patch_Job): def test_generic_failure(self, patch_Job):
job = Job(status='running', inventory=Inventory()) job = Job(status='running', inventory=Inventory(), project=Project())
job.websocket_emit_status = mock.Mock() job.websocket_emit_status = mock.Mock()
task = tasks.RunJob() task = tasks.RunJob()
task.update_model = mock.Mock(return_value=job) task.update_model = mock.Mock(return_value=job)
task.build_private_data_files = mock.Mock(side_effect=OSError()) task.build_private_data_files = mock.Mock(side_effect=OSError())
task.copy_folders = mock.Mock()
with pytest.raises(Exception): with pytest.raises(Exception):
task.run(1) task.run(1)
@@ -385,6 +386,7 @@ class TestGenericRun():
task = tasks.RunJob() task = tasks.RunJob()
task.update_model = mock.Mock(wraps=update_model_wrapper) task.update_model = mock.Mock(wraps=update_model_wrapper)
task.build_private_data_files = mock.Mock() task.build_private_data_files = mock.Mock()
task.copy_folders = mock.Mock()
with pytest.raises(Exception): with pytest.raises(Exception):
task.run(1) task.run(1)

View File

@@ -12,8 +12,9 @@
# scm_password: password (only for svn/insights) # scm_password: password (only for svn/insights)
# scm_accept_hostkey: true/false (only for git, set automatically) # scm_accept_hostkey: true/false (only for git, set automatically)
# scm_revision: current revision in tower # scm_revision: current revision in tower
# scm_revision_output: where to store gathered revision (temporary file) # git_refspec: a refspec to fetch in addition to obtaining version
# roles_enabled: Allow us to pull roles from a requirements.yml file # roles_enabled: Allow us to pull roles from a requirements.yml file
# roles_destination: Path to save roles from galaxy to
# awx_version: Current running version of the awx or tower as a string # awx_version: Current running version of the awx or tower as a string
# awx_license_type: "open" for AWX; else presume Tower # awx_license_type: "open" for AWX; else presume Tower
@@ -29,27 +30,12 @@
delegate_to: localhost delegate_to: localhost
- block: - block:
- name: check repo using git
git:
dest: "{{project_path|quote}}"
repo: "{{scm_url}}"
version: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
update: false
clone: false
register: repo_check
when: scm_full_checkout|default('')
ignore_errors: true
- name: break if already checked out
meta: end_play
when: scm_full_checkout|default('') and repo_check is succeeded and repo_check.before == scm_branch
- name: update project using git - name: update project using git
git: git:
dest: "{{project_path|quote}}" dest: "{{project_path|quote}}"
repo: "{{scm_url}}" repo: "{{scm_url}}"
version: "{{scm_branch|quote}}" version: "{{scm_branch|quote}}"
refspec: "{{git_refspec|default(omit)}}"
force: "{{scm_clean}}" force: "{{scm_clean}}"
accept_hostkey: "{{scm_accept_hostkey|default(omit)}}" accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
register: git_result register: git_result
@@ -131,13 +117,6 @@
debug: msg="Repository Version {{ scm_version }}" debug: msg="Repository Version {{ scm_version }}"
when: scm_version is defined when: scm_version is defined
- name: Write Repository Version
copy:
dest: "{{ scm_revision_output }}"
content: "{{ scm_version }}"
when: scm_version is defined and scm_revision_output is defined
delegate_to: localhost
- hosts: all - hosts: all
gather_facts: false gather_facts: false
tasks: tasks:
@@ -148,18 +127,12 @@
register: doesRequirementsExist register: doesRequirementsExist
- name: fetch galaxy roles from requirements.yml - name: fetch galaxy roles from requirements.yml
command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}
args: args:
chdir: "{{project_path|quote}}/roles" chdir: "{{project_path|quote}}/roles"
register: galaxy_result register: galaxy_result
when: doesRequirementsExist.stat.exists and (scm_version is undefined or (git_result is not skipped and git_result['before'] == git_result['after'])) when: doesRequirementsExist.stat.exists
changed_when: "'was installed successfully' in galaxy_result.stdout" changed_when: "'was installed successfully' in galaxy_result.stdout"
- name: fetch galaxy roles from requirements.yml (forced update)
command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force
args:
chdir: "{{project_path|quote}}/roles"
when: doesRequirementsExist.stat.exists and galaxy_result is skipped
when: roles_enabled|bool when: roles_enabled|bool
delegate_to: localhost delegate_to: localhost

View File

@@ -0,0 +1,30 @@
Copyright (C) 2008, 2009 Michael Trier and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the GitPython project nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

42
docs/licenses/gitdb2.txt Normal file
View File

@@ -0,0 +1,42 @@
Copyright (C) 2010, 2011 Sebastian Thiel and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the GitDB project nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Additional Licenses
-------------------
The files at
gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx
and
gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack
are licensed under GNU GPL as part of the git source repository,
see http://en.wikipedia.org/wiki/Git_%28software%29 for more information.
They are not required for the actual operation, which is why they are not found
in the distribution package.

30
docs/licenses/smmap2.txt Normal file
View File

@@ -0,0 +1,30 @@
Copyright (C) 2010, 2011 Sebastian Thiel and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the async project nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -22,6 +22,7 @@ django-split-settings==0.3.0
django-taggit==0.22.2 django-taggit==0.22.2
djangorestframework==3.9.4 djangorestframework==3.9.4
djangorestframework-yaml==1.0.3 djangorestframework-yaml==1.0.3
GitPython==2.1.11
irc==16.2 irc==16.2
jinja2==2.10.1 jinja2==2.10.1
jsonschema==2.6.0 jsonschema==2.6.0

View File

@@ -40,6 +40,8 @@ djangorestframework-yaml==1.0.3
djangorestframework==3.9.4 djangorestframework==3.9.4
future==0.16.0 # via django-radius future==0.16.0 # via django-radius
gitdb2==2.0.5 # via gitpython
gitpython==2.1.11
hyperlink==19.0.0 # via twisted hyperlink==19.0.0 # via twisted
idna==2.8 # via hyperlink, requests, twisted idna==2.8 # via hyperlink, requests, twisted
incremental==17.5.0 # via twisted incremental==17.5.0 # via twisted
@@ -100,6 +102,7 @@ service-identity==18.1.0 # via twisted
simplejson==3.16.0 # via uwsgitop simplejson==3.16.0 # via uwsgitop
six==1.12.0 # via ansible-runner, asgi-amqp, asgiref, autobahn, automat, cryptography, django-extensions, irc, isodate, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, pygerduty, pyhamcrest, pyopenssl, pyrad, python-dateutil, python-memcached, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, tempora, twilio, txaio, websocket-client six==1.12.0 # via ansible-runner, asgi-amqp, asgiref, autobahn, automat, cryptography, django-extensions, irc, isodate, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, pygerduty, pyhamcrest, pyopenssl, pyrad, python-dateutil, python-memcached, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, tempora, twilio, txaio, websocket-client
slackclient==1.1.2 slackclient==1.1.2
smmap2==2.0.5 # via gitdb2
social-auth-app-django==2.1.0 social-auth-app-django==2.1.0
social-auth-core==3.0.0 social-auth-core==3.0.0
sqlparse==0.3.0 # via django sqlparse==0.3.0 # via django