diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index fdddc9ba22..562ab99db4 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -1338,7 +1338,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
     class Meta:
         model = Project
         fields = ('*', 'organization', 'scm_update_on_launch',
-                  'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
+                  'scm_update_cache_timeout', 'scm_revision', 'allow_override', 'custom_virtualenv',) + \
                   ('last_update_failed', 'last_updated')  # Backwards compatibility

     def get_related(self, obj):
@@ -2701,7 +2701,7 @@ class LabelsListMixin(object):
 class JobOptionsSerializer(LabelsListMixin, BaseSerializer):

     class Meta:
-        fields = ('*', 'job_type', 'inventory', 'project', 'playbook',
+        fields = ('*', 'job_type', 'inventory', 'project', 'playbook', 'scm_branch',
                   'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags',
                   'force_handlers', 'skip_tags', 'start_at_task', 'timeout',
                   'use_fact_cache',)
@@ -2752,9 +2752,14 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
         playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
         if not project:
             raise serializers.ValidationError({'project': _('This field is required.')})
-        if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files:
-            raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
-        if project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks:
+        playbook_not_found = bool(
+            (
+                project and project.scm_type and (not project.allow_override) and
+                playbook and force_text(playbook) not in project.playbook_files
+            ) or
+            (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks)  # manual
+        )
+        if playbook_not_found:
             raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
         if project and not playbook:
             raise serializers.ValidationError({'playbook': _('Must select playbook for project.')})
@@ -2799,7 +2804,8 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO

     class Meta:
         model = JobTemplate
-        fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
+        fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
+                  'ask_limit_on_launch', 'ask_tags_on_launch',
                   'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
                   'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
                   'allow_simultaneous', 'custom_virtualenv', 'job_slice_count')
@@ -3365,6 +3371,7 @@ class WorkflowJobCancelSerializer(WorkflowJobSerializer):


 class LaunchConfigurationBaseSerializer(BaseSerializer):
+    scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None,
                                        choices=NEW_JOB_TYPE_CHOICES)
     job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
@@ -3377,7 +3384,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):

     class Meta:
         fields = ('*', 'extra_data', 'inventory',  # Saved launch-time config fields
-                  'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')
+                  'scm_branch', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')

     def get_related(self, obj):
         res = super(LaunchConfigurationBaseSerializer, self).get_related(obj)
@@ -3960,6 +3967,7 @@ class JobLaunchSerializer(BaseSerializer):
         required=False, write_only=True
     )
     credential_passwords = VerbatimField(required=False, write_only=True)
+    scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
     diff_mode = serializers.BooleanField(required=False, write_only=True)
     job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
     job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True)
@@ -3970,13 +3978,15 @@
     class Meta:
         model = JobTemplate
         fields = ('can_start_without_user_input', 'passwords_needed_to_start',
-                  'extra_vars', 'inventory', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
-                  'credentials', 'credential_passwords', 'ask_variables_on_launch', 'ask_tags_on_launch',
+                  'extra_vars', 'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
+                  'credentials', 'credential_passwords',
+                  'ask_scm_branch_on_launch', 'ask_variables_on_launch', 'ask_tags_on_launch',
                   'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch',
                   'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
                   'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start',
                   'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity')
         read_only_fields = (
+            'ask_scm_branch_on_launch',
             'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
             'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
             'ask_credential_on_launch',)
diff --git a/awx/main/migrations/0082_v360_job_branch_overrirde.py b/awx/main/migrations/0082_v360_job_branch_overrirde.py
new file mode 100644
index 0000000000..66a561ff1f
--- /dev/null
+++ b/awx/main/migrations/0082_v360_job_branch_overrirde.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.20 on 2019-06-14 15:08
+from __future__ import unicode_literals
+
+import awx.main.fields
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0081_v360_notify_on_start'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='job',
+            name='scm_branch',
+            field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
+        ),
+        migrations.AddField(
+            model_name='jobtemplate',
+            name='ask_scm_branch_on_launch',
+            field=awx.main.fields.AskForField(default=False),
+        ),
+        migrations.AddField(
+            model_name='jobtemplate',
+            name='scm_branch',
+            field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
+        ),
+        migrations.AddField(
+            model_name='project',
+            name='allow_override',
+            field=models.BooleanField(default=False, help_text='Allow changing the SCM branch or revision in a job template that uses this project.'),
+        ),
+        migrations.AlterField(
+            model_name='project',
+            name='scm_update_cache_timeout',
+            field=models.PositiveIntegerField(blank=True, default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
+        ),
+    ]
diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py
index 12c691d195..cea3ddc4dc 100644
--- a/awx/main/models/jobs.py
+++ b/awx/main/models/jobs.py
@@ -96,6 +96,13 @@ class JobOptions(BaseModel):
         default='',
         blank=True,
     )
+    scm_branch = models.CharField(
+        max_length=1024,
+        default='',
+        blank=True,
+        help_text=_('Branch to use in job run. Project default used if blank. '
+                    'Only allowed if project allow_override field is set to true.'),
+    )
     forks = models.PositiveIntegerField(
         blank=True,
         default=0,
@@ -234,6 +241,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
         default=False,
         allows_field='credentials'
     )
+    ask_scm_branch_on_launch = AskForField(
+        blank=True,
+        default=False,
+        allows_field='scm_branch'
+    )
     job_slice_count = models.PositiveIntegerField(
         blank=True,
         default=1,
diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py
index c86b08421d..0ba25e7325 100644
--- a/awx/main/models/projects.py
+++ b/awx/main/models/projects.py
@@ -261,9 +261,14 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
     scm_update_cache_timeout = models.PositiveIntegerField(
         default=0,
         blank=True,
-        help_text=_('The number of seconds after the last project update ran that a new'
+        help_text=_('The number of seconds after the last project update ran that a new '
                     'project update will be launched as a job dependency.'),
     )
+    allow_override = models.BooleanField(
+        default=False,
+        help_text=_('Allow changing the SCM branch or revision in a job template '
+                    'that uses this project.'),
+    )
     scm_revision = models.CharField(
         max_length=1024,
diff --git a/awx/main/tasks.py b/awx/main/tasks.py
index 0a41380d03..9dfbee9e95 100644
--- a/awx/main/tasks.py
+++ b/awx/main/tasks.py
@@ -20,6 +20,8 @@ from distutils.dir_util import copy_tree
 from distutils.version import LooseVersion as Version
 import yaml
 import fcntl
+from pathlib import Path
+from uuid import uuid4
 try:
     import psutil
 except Exception:
@@ -41,6 +43,10 @@ from django.core.exceptions import ObjectDoesNotExist
 # Django-CRUM
 from crum import impersonate

+# GitPython
+import git
+from gitdb.exc import BadName as BadGitName
+
 # Runner
 import ansible_runner

@@ -694,9 +700,12 @@ class BaseTask(object):
     model = None
     event_model = None
     abstract = True
-    cleanup_paths = []
     proot_show_paths = []

+    def __init__(self, *args, **kwargs):
+        super(BaseTask, self).__init__(*args, **kwargs)
+        self.cleanup_paths = []
+
     def update_model(self, pk, _attempt=0, **updates):
         """Reload the model instance from the database and update the
         given fields.
@@ -769,9 +778,11 @@ class BaseTask(object):
             os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
         if settings.AWX_CLEANUP_PATHS:
             self.cleanup_paths.append(path)
-        # Ansible Runner requires that this directory exists.
-        # Specifically, when using process isolation
-        os.mkdir(os.path.join(path, 'project'))
+        runner_project_folder = os.path.join(path, 'project')
+        if not os.path.exists(runner_project_folder):
+            # Ansible Runner requires that this directory exists.
+            # Specifically, when using process isolation
+            os.mkdir(runner_project_folder)
         return path

     def build_private_data_files(self, instance, private_data_dir):
@@ -860,7 +871,10 @@ class BaseTask(object):
         '''
         process_isolation_params = dict()
         if self.should_use_proot(instance):
-            show_paths = self.proot_show_paths + [private_data_dir, cwd] + \
+            local_paths = [private_data_dir]
+            if cwd != private_data_dir and Path(private_data_dir) not in Path(cwd).parents:
+                local_paths.append(cwd)
+            show_paths = self.proot_show_paths + local_paths + \
                 settings.AWX_PROOT_SHOW_PATHS

             # Help the user out by including the collections path inside the bubblewrap environment
@@ -1030,7 +1044,7 @@ class BaseTask(object):
                     expect_passwords[k] = passwords.get(v, '') or ''
         return expect_passwords

-    def pre_run_hook(self, instance):
+    def pre_run_hook(self, instance, private_data_dir):
         '''
         Hook for any steps to run before the job/task starts
         '''
@@ -1157,7 +1171,8 @@ class BaseTask(object):
         try:
             isolated = self.instance.is_isolated()
             self.instance.send_notification_templates("running")
-            self.pre_run_hook(self.instance)
+            private_data_dir = self.build_private_data_dir(self.instance)
+            self.pre_run_hook(self.instance, private_data_dir)
             if self.instance.cancel_flag:
                 self.instance = self.update_model(self.instance.pk, status='canceled')
             if self.instance.status != 'running':
@@ -1173,7 +1188,6 @@
             # store a record of the venv used at runtime
             if hasattr(self.instance, 'custom_virtualenv'):
                 self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH))
-            private_data_dir = self.build_private_data_dir(self.instance)

             # Fetch "cached" fact data from prior runs and put on the disk
             # where ansible expects to find it
@@ -1256,9 +1270,6 @@ class BaseTask(object):
                 module_args = ansible_runner.utils.args2cmdline(
                     params.get('module_args'),
                 )
-            else:
-                # otherwise, it's a playbook, so copy the project dir
-                copy_tree(cwd, os.path.join(private_data_dir, 'project'))
             shutil.move(
                 params.pop('inventory'),
                 os.path.join(private_data_dir, 'inventory')
             )
@@ -1532,15 +1543,10 @@ class RunJob(BaseTask):
         return args

     def build_cwd(self, job, private_data_dir):
-        cwd = job.project.get_project_path()
-        if not cwd:
-            root = settings.PROJECTS_ROOT
-            raise RuntimeError('project local_path %s cannot be found in %s' %
-                               (job.project.local_path, root))
-        return cwd
+        return os.path.join(private_data_dir, 'project')

     def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
-        return os.path.join(job.playbook)
+        return job.playbook

     def build_extra_vars_file(self, job, private_data_dir):
         # Define special extra_vars for AWX, combine with job.extra_vars.
@@ -1587,39 +1593,117 @@ class RunJob(BaseTask):
         '''
         return getattr(settings, 'AWX_PROOT_ENABLED', False)

-    def pre_run_hook(self, job):
+    def copy_folders(self, project_path, galaxy_install_path, private_data_dir):
+        if project_path is None:
+            raise RuntimeError('project does not supply a valid path')
+        elif not os.path.exists(project_path):
+            raise RuntimeError('project path %s cannot be found' % project_path)
+        runner_project_folder = os.path.join(private_data_dir, 'project')
+        copy_tree(project_path, runner_project_folder)
+        if galaxy_install_path:
+            galaxy_run_path = os.path.join(private_data_dir, 'project', 'roles')
+            copy_tree(galaxy_install_path, galaxy_run_path)
+
+    def pre_run_hook(self, job, private_data_dir):
         if job.inventory is None:
             error = _('Job could not start because it does not have a valid inventory.')
             self.update_model(job.pk, status='failed', job_explanation=error)
             raise RuntimeError(error)
-        if job.project and job.project.scm_type:
+        elif job.project is None:
+            error = _('Job could not start because it does not have a valid project.')
+            self.update_model(job.pk, status='failed', job_explanation=error)
+            raise RuntimeError(error)
+        elif job.project.status in ('error', 'failed'):
+            msg = _(
+                'The project revision for this job template is unknown due to a failed update.'
+            )
+            job = self.update_model(job.pk, status='failed', job_explanation=msg)
+            raise RuntimeError(msg)
+
+        galaxy_install_path = None
+        git_repo = None
+        project_path = job.project.get_project_path(check_if_exists=False)
+        job_revision = job.project.scm_revision
+        needs_sync = True
+        if not job.project.scm_type:
+            # manual projects are not synced, user has responsibility for that
+            needs_sync = False
+        elif not os.path.exists(project_path):
+            logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
+            needs_sync = True
+        elif job.project.scm_type == 'git':
+            git_repo = git.Repo(project_path)
+            if job.scm_branch and job.scm_branch != job.project.scm_branch and git_repo:
+                try:
+                    commit = git_repo.commit(job.scm_branch)
+                    job_revision = commit.hexsha
+                    logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
+                    needs_sync = False  # requested commit is already locally available
+                except (ValueError, BadGitName):
+                    pass
+            else:
+                if git_repo.head.commit.hexsha == job.project.scm_revision:
+                    logger.info('Source tree for {} is already up to date'.format(job.log_format))
+                    needs_sync = False
+        # Galaxy requirements are not supported for manual projects
+        if not needs_sync and job.project.scm_type:
+            # see if we need a sync because of presence of roles
+            galaxy_req_path = os.path.join(project_path, 'roles', 'requirements.yml')
+            if os.path.exists(galaxy_req_path):
+                logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format))
+                needs_sync = True
+
+        if needs_sync:
             pu_ig = job.instance_group
             pu_en = job.execution_node
             if job.is_isolated() is True:
                 pu_ig = pu_ig.controller
                 pu_en = settings.CLUSTER_HOST_ID
-            if job.project.status in ('error', 'failed'):
-                msg = _(
-                    'The project revision for this job template is unknown due to a failed update.'
-                )
-                job = self.update_model(job.pk, status='failed', job_explanation=msg)
-                raise RuntimeError(msg)
-            local_project_sync = job.project.create_project_update(
-                _eager_fields=dict(
-                    launch_type="sync",
-                    job_type='run',
-                    status='running',
-                    instance_group = pu_ig,
-                    execution_node=pu_en,
-                    celery_task_id=job.celery_task_id))
+            sync_metafields = dict(
+                launch_type="sync",
+                job_type='run',
+                status='running',
+                instance_group = pu_ig,
+                execution_node=pu_en,
+                celery_task_id=job.celery_task_id
+            )
+            if job.scm_branch and job.scm_branch != job.project.scm_branch:
+                sync_metafields['scm_branch'] = job.scm_branch
+            local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields)
             # save the associated job before calling run() so that a
             # cancel() call on the job can cancel the project update
             job = self.update_model(job.pk, project_update=local_project_sync)

+            # Save the roles from galaxy to a temporary directory to be moved later
+            # at this point, the project folder has not yet been copied into the temporary directory
+            galaxy_install_path = tempfile.mkdtemp(prefix='tmp_roles_', dir=private_data_dir)
+            os.chmod(galaxy_install_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
             project_update_task = local_project_sync._get_task_class()
             try:
-                project_update_task().run(local_project_sync.id)
-                job = self.update_model(job.pk, scm_revision=job.project.scm_revision)
+                sync_task = project_update_task(roles_destination=galaxy_install_path)
+                sync_task.run(local_project_sync.id)
+                # if the job overrode the branch, we need to find the revision that will be run
+                if job.scm_branch and job.scm_branch != job.project.scm_branch:
+                    # TODO: handle case of non-git
+                    if job.project.scm_type == 'git':
+                        git_repo = git.Repo(project_path)
+                        try:
+                            commit = git_repo.commit(job.scm_branch)
+                            job_revision = commit.hexsha
+                            logger.debug('Evaluated {} to be a valid commit for {}'.format(job.scm_branch, job.log_format))
+                        except (ValueError, BadGitName):
+                            # not a commit, see if it is a ref
+                            try:
+                                user_branch = getattr(git_repo.refs, job.scm_branch)
+                                job_revision = user_branch.commit.hexsha
+                                logger.debug('Evaluated {} to be a valid ref for {}'.format(job.scm_branch, job.log_format))
+                            except git.exc.NoSuchPathError as exc:
+                                raise RuntimeError('Could not find specified version {}, error: {}'.format(
+                                    job.scm_branch, exc
+                                ))
+                else:
+                    job_revision = sync_task.updated_revision
+                job = self.update_model(job.pk, scm_revision=job_revision)
             except Exception:
                 local_project_sync.refresh_from_db()
                 if local_project_sync.status != 'canceled':
@@ -1627,6 +1711,31 @@ class RunJob(BaseTask):
                         job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
                                          ('project_update', local_project_sync.name, local_project_sync.id)))
                     raise
+        else:
+            # Case where a local sync is not needed, meaning that local tree is
+            # up-to-date with project, job is running project current version
+            if job_revision:
+                job = self.update_model(job.pk, scm_revision=job_revision)
+
+        # copy the project directory
+        runner_project_folder = os.path.join(private_data_dir, 'project')
+        if job.project.scm_type == 'git':
+            git_repo = git.Repo(project_path)
+            if not os.path.exists(runner_project_folder):
+                os.mkdir(runner_project_folder)
+            tmp_branch_name = 'awx_internal/{}'.format(uuid4())
+            # always clone based on specific job revision
+            source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision)
+            git_repo.clone(runner_project_folder, branch=source_branch, depth=1, single_branch=True)
+            # force option is necessary because remote refs are not counted, although no information is lost
+            git_repo.delete_head(tmp_branch_name, force=True)
+        else:
+            copy_tree(project_path, runner_project_folder)
+        if galaxy_install_path and os.listdir(galaxy_install_path):
+            logger.debug('Copying galaxy roles for {} to tmp directory'.format(job.log_format))
+            galaxy_run_path = os.path.join(private_data_dir, 'project', 'roles')
+            copy_tree(galaxy_install_path, galaxy_run_path)
+
         if job.inventory.kind == 'smart':
             # cache smart inventory memberships so that the host_filter query is not
             # ran inside of the event saving code
@@ -1663,7 +1772,23 @@ class RunProjectUpdate(BaseTask):

     @property
     def proot_show_paths(self):
-        return [settings.PROJECTS_ROOT]
+        show_paths = [settings.PROJECTS_ROOT]
+        if self.roles_destination:
+            show_paths.append(self.roles_destination)
+        return show_paths
+
+    def __init__(self, *args, roles_destination=None, **kwargs):
+        super(RunProjectUpdate, self).__init__(*args, **kwargs)
+        self.updated_revision = None
+        self.roles_destination = roles_destination
+
+    def event_handler(self, event_data):
+        super(RunProjectUpdate, self).event_handler(event_data)
+        returned_data = event_data.get('event_data', {})
+        if returned_data.get('task_action', '') == 'set_fact':
+            returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
+            if 'scm_version' in returned_facts:
+                self.updated_revision = returned_facts['scm_version']

     def build_private_data(self, project_update, private_data_dir):
         '''
@@ -1678,9 +1803,6 @@ class RunProjectUpdate(BaseTask):
             }
         }
         '''
-        handle, self.revision_path = tempfile.mkstemp(dir=settings.PROJECTS_ROOT)
-        if settings.AWX_CLEANUP_PATHS:
-            self.cleanup_paths.append(self.revision_path)
         private_data = {'credentials': {}}
         if project_update.credential:
             credential = project_update.credential
@@ -1781,7 +1903,7 @@
         scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update)
         extra_vars.update(extra_vars_new)

-        if project_update.project.scm_revision and project_update.job_type == 'run':
+        if project_update.project.scm_revision and project_update.job_type == 'run' and not project_update.project.allow_override:
             scm_branch = project_update.project.scm_revision
         else:
             scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
@@ -1796,17 +1918,21 @@
             'scm_clean': project_update.scm_clean,
             'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,
             'scm_full_checkout': True if project_update.job_type == 'run' else False,
-            'scm_revision_output': self.revision_path,
             'scm_revision': project_update.project.scm_revision,
-            'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True)
+            'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True) if project_update.job_type != 'check' else False
         })
+        if project_update.project.allow_override:
+            # If branch is override-able, do extra fetch for all branches
+            # coming feature TODO: obtain custom refspec from user for PR refs and the like
+            extra_vars['git_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
+        if self.roles_destination:
+            extra_vars['roles_destination'] = self.roles_destination
         self._write_extra_vars_file(private_data_dir, extra_vars)

     def build_cwd(self, project_update, private_data_dir):
         return self.get_path_to('..', 'playbooks')

     def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir):
-        self.build_cwd(project_update, private_data_dir)
         return os.path.join('project_update.yml')

     def get_password_prompts(self, passwords={}):
@@ -1920,7 +2046,7 @@
                 '{} spent {} waiting to acquire lock for local source tree '
                 'for path {}.'.format(instance.log_format, waiting_time, lock_path))

-    def pre_run_hook(self, instance):
+    def pre_run_hook(self, instance, private_data_dir):
         # re-create root project folder if a natural disaster has destroyed it
         if not os.path.exists(settings.PROJECTS_ROOT):
             os.mkdir(settings.PROJECTS_ROOT)
@@ -1930,10 +2056,8 @@
             self.release_lock(instance)
         p = instance.project
         if instance.job_type == 'check' and status not in ('failed', 'canceled',):
-            fd = open(self.revision_path, 'r')
-            lines = fd.readlines()
-            if lines:
-                p.scm_revision = lines[0].strip()
+            if self.updated_revision:
+                p.scm_revision = self.updated_revision
             else:
                 logger.info("{} Could not find scm revision in check".format(instance.log_format))
             p.playbook_files = p.playbooks
@@ -2159,11 +2283,12 @@ class RunInventoryUpdate(BaseTask):
         # All credentials not used by inventory source injector
         return inventory_update.get_extra_credentials()

-    def pre_run_hook(self, inventory_update):
+    def pre_run_hook(self, inventory_update, private_data_dir):
         source_project = None
         if inventory_update.inventory_source:
             source_project = inventory_update.inventory_source.source_project
         if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and source_project):
+            # In project sync, pulling galaxy roles is not needed
             local_project_sync = source_project.create_project_update(
                 _eager_fields=dict(
                     launch_type="sync",
diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py
index 75d616ac3b..e00e71b907 100644
--- a/awx/main/tests/unit/test_tasks.py
+++ b/awx/main/tests/unit/test_tasks.py
@@ -361,12 +361,13 @@ class TestExtraVarSanitation(TestJobExecution):
 class TestGenericRun():

     def test_generic_failure(self, patch_Job):
-        job = Job(status='running', inventory=Inventory())
+        job = Job(status='running', inventory=Inventory(), project=Project())
         job.websocket_emit_status = mock.Mock()

         task = tasks.RunJob()
         task.update_model = mock.Mock(return_value=job)
         task.build_private_data_files = mock.Mock(side_effect=OSError())
+        task.copy_folders = mock.Mock()

         with pytest.raises(Exception):
             task.run(1)
@@ -385,6 +386,7 @@ class TestGenericRun():
         task = tasks.RunJob()
         task.update_model = mock.Mock(wraps=update_model_wrapper)
         task.build_private_data_files = mock.Mock()
+        task.copy_folders = mock.Mock()

         with pytest.raises(Exception):
             task.run(1)
diff --git a/awx/playbooks/project_update.yml b/awx/playbooks/project_update.yml
index 25cbef5951..0fa776b2c9 100644
--- a/awx/playbooks/project_update.yml
+++ b/awx/playbooks/project_update.yml
@@ -12,8 +12,9 @@
 # scm_password: password (only for svn/insights)
 # scm_accept_hostkey: true/false (only for git, set automatically)
 # scm_revision: current revision in tower
-# scm_revision_output: where to store gathered revision (temporary file)
+# git_refspec: a refspec to fetch in addition to obtaining version
 # roles_enabled: Allow us to pull roles from a requirements.yml file
+# roles_destination: Path to save roles from galaxy to
 # awx_version: Current running version of the awx or tower as a string
 # awx_license_type: "open" for AWX; else presume Tower

@@ -29,27 +30,12 @@
         delegate_to: localhost

     - block:
-        - name: check repo using git
-          git:
-            dest: "{{project_path|quote}}"
-            repo: "{{scm_url}}"
-            version: "{{scm_branch|quote}}"
-            force: "{{scm_clean}}"
"{{scm_clean}}" - update: false - clone: false - register: repo_check - when: scm_full_checkout|default('') - ignore_errors: true - - - name: break if already checked out - meta: end_play - when: scm_full_checkout|default('') and repo_check is succeeded and repo_check.before == scm_branch - - name: update project using git git: dest: "{{project_path|quote}}" repo: "{{scm_url}}" version: "{{scm_branch|quote}}" + refspec: "{{git_refspec|default(omit)}}" force: "{{scm_clean}}" accept_hostkey: "{{scm_accept_hostkey|default(omit)}}" register: git_result @@ -131,13 +117,6 @@ debug: msg="Repository Version {{ scm_version }}" when: scm_version is defined - - name: Write Repository Version - copy: - dest: "{{ scm_revision_output }}" - content: "{{ scm_version }}" - when: scm_version is defined and scm_revision_output is defined - delegate_to: localhost - - hosts: all gather_facts: false tasks: @@ -148,18 +127,12 @@ register: doesRequirementsExist - name: fetch galaxy roles from requirements.yml - command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ + command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}} args: chdir: "{{project_path|quote}}/roles" register: galaxy_result - when: doesRequirementsExist.stat.exists and (scm_version is undefined or (git_result is not skipped and git_result['before'] == git_result['after'])) + when: doesRequirementsExist.stat.exists changed_when: "'was installed successfully' in galaxy_result.stdout" - - name: fetch galaxy roles from requirements.yml (forced update) - command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force - args: - chdir: "{{project_path|quote}}/roles" - when: doesRequirementsExist.stat.exists and galaxy_result is skipped - when: roles_enabled|bool delegate_to: localhost diff --git a/docs/licenses/GitPython.txt b/docs/licenses/GitPython.txt new file mode 100644 index 0000000000..5a9a6f8d38 --- /dev/null +++ b/docs/licenses/GitPython.txt @@ -0,0 +1,30 @@ +Copyright (C) 2008, 2009 Michael Trier and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of the GitPython project nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
diff --git a/docs/licenses/gitdb2.txt b/docs/licenses/gitdb2.txt
new file mode 100644
index 0000000000..0d6fe8bdb9
--- /dev/null
+++ b/docs/licenses/gitdb2.txt
@@ -0,0 +1,42 @@
+Copyright (C) 2010, 2011 Sebastian Thiel and contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+* Neither the name of the GitDB project nor the names of
+its contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+Additional Licenses
+-------------------
+The files at
+gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx
+and
+gitdb/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack
+are licensed under GNU GPL as part of the git source repository,
+see http://en.wikipedia.org/wiki/Git_%28software%29 for more information.
+
+They are not required for the actual operation, which is why they are not found
+in the distribution package.
diff --git a/docs/licenses/smmap2.txt b/docs/licenses/smmap2.txt
new file mode 100644
index 0000000000..710010f1fe
--- /dev/null
+++ b/docs/licenses/smmap2.txt
@@ -0,0 +1,30 @@
+Copyright (C) 2010, 2011 Sebastian Thiel and contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+* Neither the name of the async project nor the names of
+its contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/requirements/requirements.in b/requirements/requirements.in
index 5519c0ea9b..5adb3eae41 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -22,6 +22,7 @@ django-split-settings==0.3.0
 django-taggit==0.22.2
 djangorestframework==3.9.4
 djangorestframework-yaml==1.0.3
+GitPython==2.1.11
 irc==16.2
 jinja2==2.10.1
 jsonschema==2.6.0
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 330d75ecd1..5209b57126 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -40,6 +40,8 @@
 djangorestframework-yaml==1.0.3
 djangorestframework==3.9.4
 future==0.16.0            # via django-radius
+gitdb2==2.0.5             # via gitpython
+gitpython==2.1.11
 hyperlink==19.0.0         # via twisted
 idna==2.8                 # via hyperlink, requests, twisted
 incremental==17.5.0       # via twisted
@@ -100,6 +102,7 @@ service-identity==18.1.0  # via twisted
 simplejson==3.16.0        # via uwsgitop
 six==1.12.0               # via ansible-runner, asgi-amqp, asgiref, autobahn, automat, cryptography, django-extensions, irc, isodate, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, pygerduty, pyhamcrest, pyopenssl, pyrad, python-dateutil, python-memcached, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, tempora, twilio, txaio, websocket-client
 slackclient==1.1.2
+smmap2==2.0.5             # via gitdb2
 social-auth-app-django==2.1.0
 social-auth-core==3.0.0
 sqlparse==0.3.0           # via django