Fix bugs with discovery of collection requirements

Addresses some cases where
  collection requirements do not exist
  collection requirements cannot be evaluated

Consolidate logic for roles and collection installs
AlanCoding 2019-07-25 08:22:32 -04:00
parent cc6413c44c
commit 270bd19dbd
3 changed files with 28 additions and 23 deletions


@@ -1475,7 +1475,14 @@ class RunJob(BaseTask):
             if authorize:
                 env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')

-        env['ANSIBLE_COLLECTIONS_PATHS'] = os.path.join(private_data_dir, 'requirements_collections')
+        for env_key, folder in (
+                ('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'),
+                ('ANSIBLE_ROLES_PATH', 'requirements_roles')):
+            paths = []
+            if env_key in env:
+                paths.append(env[env_key])
+            paths.append(os.path.join(private_data_dir, folder))
+            env[env_key] = os.pathsep.join(paths)

         return env
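For reference, a minimal standalone sketch of the path-merging behaviour added above; the private_data_dir value and the pre-seeded env entry are hypothetical:

    import os

    private_data_dir = '/tmp/awx_job_123'   # hypothetical per-job private data dir
    # env as it might look after AWX_TASK_ENV has been applied
    env = {'ANSIBLE_COLLECTIONS_PATHS': '/opt/collections_a:/opt/collections_b'}

    for env_key, folder in (
            ('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'),
            ('ANSIBLE_ROLES_PATH', 'requirements_roles')):
        paths = []
        if env_key in env:
            # keep whatever the settings or task environment already put there
            paths.append(env[env_key])
        # the per-job requirements folder always goes last
        paths.append(os.path.join(private_data_dir, folder))
        env[env_key] = os.pathsep.join(paths)

    # ANSIBLE_COLLECTIONS_PATHS -> '/opt/collections_a:/opt/collections_b:/tmp/awx_job_123/requirements_collections'
    # ANSIBLE_ROLES_PATH        -> '/tmp/awx_job_123/requirements_roles'

Unlike the removed single assignment, this keeps any ANSIBLE_COLLECTIONS_PATHS value that was already configured instead of overwriting it.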
@@ -1619,7 +1626,7 @@ class RunJob(BaseTask):
             needs_sync = False
         elif not os.path.exists(project_path):
             logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
-        elif job.project.scm_revision:
+        elif not job.project.scm_revision:
             logger.debug('Revision not known for {}, will sync with remote'.format(job.project))
         elif job.project.scm_type == 'git':
             git_repo = git.Repo(project_path)
@@ -1627,10 +1634,11 @@ class RunJob(BaseTask):
                 desired_revision = job.project.scm_revision
                 if job.scm_branch and job.scm_branch != job.project.scm_branch:
                     desired_revision = job.scm_branch  # could be commit or not, but will try as commit
-                commit = git_repo.commit(desired_revision)
-                job_revision = commit.hexsha
-                logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
-                needs_sync = False
+                current_revision = git_repo.head.commit.hexsha
+                if desired_revision == current_revision:
+                    job_revision = desired_revision
+                    logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
+                    needs_sync = False
             except (ValueError, BadGitName):
                 logger.debug('Needed commit for {} not in local source tree, will sync with remote'.format(job.log_format))
         # Galaxy requirements are not supported for manual projects
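The removed lines only asked GitPython to resolve desired_revision to a commit object, which succeeds whenever that object exists anywhere in the local repo; the added lines require it to match the currently checked-out HEAD before the sync is skipped. A rough standalone illustration with GitPython (repo path and revision are made up):

    import git

    repo = git.Repo('/var/lib/awx/projects/_6__demo')              # hypothetical local project checkout
    desired_revision = '347e44fea036c94d5f60e544de006453ee5c71ad'   # hypothetical commit

    current_revision = repo.head.commit.hexsha
    if desired_revision == current_revision:
        # the working tree already matches, so the project sync can be skipped
        needs_sync = False
    else:
        # the commit may exist locally but is not what is checked out,
        # so a sync is still needed
        needs_sync = True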
@@ -1641,7 +1649,6 @@ class RunJob(BaseTask):
                 logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format))
                 needs_sync = True

-        galaxy_install_path = None
         if needs_sync:
             pu_ig = job.instance_group
             pu_en = job.execution_node
@@ -1663,13 +1670,10 @@ class RunJob(BaseTask):
             # cancel() call on the job can cancel the project update
             job = self.update_model(job.pk, project_update=local_project_sync)

-            # Save the roles from galaxy to a temporary directory to be moved later
-            # at this point, the project folder has not yet been coppied into the temporary directory
-            galaxy_install_path = tempfile.mkdtemp(prefix='tmp_roles_', dir=private_data_dir)
-            os.chmod(galaxy_install_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
             project_update_task = local_project_sync._get_task_class()
             try:
-                sync_task = project_update_task(job_private_data_dir=private_data_dir, roles_destination=galaxy_install_path)
+                # the job private_data_dir is passed so sync can download roles and collections there
+                sync_task = project_update_task(job_private_data_dir=private_data_dir)
                 sync_task.run(local_project_sync.id)
                 local_project_sync.refresh_from_db()
                 job = self.update_model(job.pk, scm_revision=local_project_sync.scm_revision)
@@ -1705,10 +1709,6 @@ class RunJob(BaseTask):
                 git_repo.delete_head(tmp_branch_name, force=True)
         else:
             copy_tree(project_path, runner_project_folder)

-        if galaxy_install_path and os.listdir(galaxy_install_path):
-            logger.debug('Copying galaxy roles for {} to tmp directory'.format(job.log_format))
-            galaxy_run_path = os.path.join(private_data_dir, 'project', 'roles')
-            copy_tree(galaxy_install_path, galaxy_run_path)
         if job.inventory.kind == 'smart':
             # cache smart inventory memberships so that the host_filter query is not
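The removed block copied roles out of a temporary download directory into <private_data_dir>/project/roles after the sync. It is no longer needed because the sync now installs roles and collections directly into the requirements_roles and requirements_collections folders, which the first hunk already places on ANSIBLE_ROLES_PATH and ANSIBLE_COLLECTIONS_PATHS. A quick way to sanity-check the new layout in a development environment (path hypothetical):

    import os

    private_data_dir = '/tmp/awx_job_123'   # hypothetical job private data dir

    for folder in ('requirements_roles', 'requirements_collections'):
        path = os.path.join(private_data_dir, folder)
        if os.path.isdir(path):
            # after a sync for a project with galaxy requirements, these should be populated
            print(path, os.listdir(path))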
@@ -1751,11 +1751,10 @@ class RunProjectUpdate(BaseTask):
             show_paths.append(self.job_private_data_dir)
         return show_paths

-    def __init__(self, *args, job_private_data_dir=None, roles_destination=None, **kwargs):
+    def __init__(self, *args, job_private_data_dir=None, **kwargs):
         super(RunProjectUpdate, self).__init__(*args, **kwargs)
         self.playbook_new_revision = None
         self.job_private_data_dir = job_private_data_dir
-        self.roles_destination = roles_destination

     def event_handler(self, event_data):
         super(RunProjectUpdate, self).event_handler(event_data)
@@ -1786,7 +1785,9 @@ class RunProjectUpdate(BaseTask):
         # Create dir where collections will live for the job run
         if project_update.job_type != 'check' and getattr(self, 'job_private_data_dir'):
-            os.mkdir(os.path.join(self.job_private_data_dir, 'requirements_collections'), stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+            for folder_name in ('requirements_collections', 'requirements_roles'):
+                folder_path = os.path.join(self.job_private_data_dir, folder_name)
+                os.mkdir(folder_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
         return private_data

     def build_passwords(self, project_update, runtime_passwords):
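The mode passed to os.mkdir is the owner-only read/write/execute combination, i.e. 0o700, so both per-job requirements folders stay private to the service user. A small sketch of the loop on its own, with a hypothetical base directory:

    import os
    import stat

    job_private_data_dir = '/tmp/awx_job_123'   # hypothetical

    for folder_name in ('requirements_collections', 'requirements_roles'):
        folder_path = os.path.join(job_private_data_dir, folder_name)
        # stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC == 0o700 (owner rwx only)
        os.mkdir(folder_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)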
@@ -1902,16 +1903,15 @@ class RunProjectUpdate(BaseTask):
             'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True) if project_update.job_type != 'check' else False,
             'collections_enabled': getattr(settings, 'AWX_COLLECTIONS_ENABLED', True) if project_update.job_type != 'check' else False,
         })
-        if project_update.job_type != 'check':
+        if project_update.job_type != 'check' and self.job_private_data_dir:
             extra_vars['collections_destination'] = os.path.join(self.job_private_data_dir, 'requirements_collections')
+            extra_vars['roles_destination'] = os.path.join(self.job_private_data_dir, 'requirements_roles')
         # apply custom refspec from user for PR refs and the like
         if project_update.scm_refspec:
             extra_vars['scm_refspec'] = project_update.scm_refspec
         elif project_update.project.allow_override:
             # If branch is override-able, do extra fetch for all branches
             extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
-        if self.roles_destination:
-            extra_vars['roles_destination'] = self.roles_destination
         self._write_extra_vars_file(private_data_dir, extra_vars)

     def build_cwd(self, project_update, private_data_dir):
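Both destinations are now emitted together, and only for non-'check' updates that actually have a job private data dir attached (self.job_private_data_dir defaults to None for a project update run outside of a job). Roughly, the extra vars handed to the project update playbook end up looking like this; the paths are hypothetical:

    import os

    job_private_data_dir = '/tmp/awx_job_123'   # hypothetical; None for a standalone project update

    extra_vars = {
        'roles_enabled': True,
        'collections_enabled': True,
    }
    if job_private_data_dir:
        extra_vars['collections_destination'] = os.path.join(job_private_data_dir, 'requirements_collections')
        extra_vars['roles_destination'] = os.path.join(job_private_data_dir, 'requirements_roles')
    # extra_vars is then written out by _write_extra_vars_file for the playbook run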


@@ -526,7 +526,10 @@ class TestGenericRun():
         with mock.patch('awx.main.tasks.settings.AWX_ANSIBLE_COLLECTIONS_PATHS', ['/AWX_COLLECTION_PATH']):
             with mock.patch('awx.main.tasks.settings.AWX_TASK_ENV', {'ANSIBLE_COLLECTIONS_PATHS': '/MY_COLLECTION1:/MY_COLLECTION2'}):
                 env = task.build_env(job, private_data_dir)
-        assert env['ANSIBLE_COLLECTIONS_PATHS'] == '/MY_COLLECTION1:/MY_COLLECTION2:/AWX_COLLECTION_PATH'
+        used_paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(':')
+        assert used_paths[-1].endswith('/requirements_collections')
+        used_paths.pop()
+        assert used_paths == ['/MY_COLLECTION1', '/MY_COLLECTION2', '/AWX_COLLECTION_PATH']

     def test_valid_custom_virtualenv(self, patch_Job, private_data_dir):
         job = Job(project=Project(), inventory=Inventory())
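The old assertion hard-coded the full expected value, which cannot work once a per-run requirements_collections path rooted in a temporary directory is appended; the reworked test peels off that trailing entry and compares the rest. A condensed, self-contained version of the same idea (the env value is fabricated here rather than produced by build_env):

    import os

    def test_collections_paths_order():
        # stand-in for what build_env would return for this variable
        env = {'ANSIBLE_COLLECTIONS_PATHS': os.pathsep.join([
            '/MY_COLLECTION1',
            '/MY_COLLECTION2',
            '/AWX_COLLECTION_PATH',
            '/tmp/awx_job_123/requirements_collections',   # per-job entry, unpredictable prefix
        ])}

        used_paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(os.pathsep)
        # the per-job folder is always last, but its temp-dir prefix varies per run
        assert used_paths[-1].endswith('/requirements_collections')
        # everything before it must keep the configured order
        assert used_paths[:-1] == ['/MY_COLLECTION1', '/MY_COLLECTION2', '/AWX_COLLECTION_PATH']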


@@ -147,6 +147,8 @@
           args:
             chdir: "{{project_path|quote}}/collections"
           register: galaxy_collection_result
+          when: doesCollectionRequirementsExist.stat.exists
           changed_when: "'Installing ' in galaxy_collection_result.stdout"

       when: collections_enabled|bool
       delegate_to: localhost
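The added when: doesCollectionRequirementsExist.stat.exists condition (presumably registered by a stat task on collections/requirements.yml earlier in the play) is what stops the install step from running, and failing, when a project has no collection requirements at all. The same guard expressed as a plain Python sketch, in case the control flow is easier to follow outside Ansible; the helper name is hypothetical and the exact ansible-galaxy invocation should be taken from the playbook rather than from here:

    import os
    import subprocess

    def install_collection_requirements(project_path, collections_destination):
        # hypothetical helper mirroring the playbook guard: only invoke
        # ansible-galaxy when collections/requirements.yml actually exists
        req_dir = os.path.join(project_path, 'collections')
        req_file = os.path.join(req_dir, 'requirements.yml')
        if not os.path.exists(req_file):
            return None   # nothing to install; mirrors the stat-based when: condition
        return subprocess.run(
            ['ansible-galaxy', 'collection', 'install',
             '-r', 'requirements.yml', '-p', collections_destination],
            cwd=req_dir, check=True,
        )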