From cdfc9e05d41ebfc6f751bea73849872cc8558974 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Fri, 15 May 2020 17:52:08 -0400 Subject: [PATCH 001/494] Fix offline RHEL 8 builds This was causing our EL8 Brew builds to break, because it wasn't being vendored. This is in fact required for python3. It was being resolved as a dependency of other things (see list at end of line), so it was being downloaded on-the-fly since our normal builds have internet access. It only broke when it wasn't vendored for offline builds. --- requirements/requirements_ansible.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/requirements_ansible.txt b/requirements/requirements_ansible.txt index 5e8569e0f0..3ef3f1318d 100644 --- a/requirements/requirements_ansible.txt +++ b/requirements/requirements_ansible.txt @@ -26,7 +26,7 @@ azure-mgmt-loganalytics==0.2.0 # via -r /awx_devel/requirements/requirements_an azure-mgmt-marketplaceordering==0.1.0 # via -r /awx_devel/requirements/requirements_ansible.in azure-mgmt-monitor==0.5.2 # via -r /awx_devel/requirements/requirements_ansible.in azure-mgmt-network==2.3.0 # via -r /awx_devel/requirements/requirements_ansible.in -azure-mgmt-nspkg==2.0.0; python_version < "3" # via -r /awx_devel/requirements/requirements_ansible.in, azure-mgmt-authorization, azure-mgmt-automation, azure-mgmt-batch, azure-mgmt-cdn, azure-mgmt-compute, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-hdinsight, azure-mgmt-iothub, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-marketplaceordering, azure-mgmt-monitor, azure-mgmt-network, azure-mgmt-rdbms, azure-mgmt-redis, azure-mgmt-resource, azure-mgmt-servicebus, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-trafficmanager, azure-mgmt-web +azure-mgmt-nspkg==2.0.0 # via -r /awx_devel/requirements/requirements_ansible.in, azure-mgmt-authorization, azure-mgmt-automation, azure-mgmt-batch, azure-mgmt-cdn, azure-mgmt-compute, azure-mgmt-containerinstance, azure-mgmt-containerregistry, azure-mgmt-containerservice, azure-mgmt-cosmosdb, azure-mgmt-devtestlabs, azure-mgmt-dns, azure-mgmt-hdinsight, azure-mgmt-iothub, azure-mgmt-keyvault, azure-mgmt-loganalytics, azure-mgmt-marketplaceordering, azure-mgmt-monitor, azure-mgmt-network, azure-mgmt-rdbms, azure-mgmt-redis, azure-mgmt-resource, azure-mgmt-servicebus, azure-mgmt-sql, azure-mgmt-storage, azure-mgmt-trafficmanager, azure-mgmt-web azure-mgmt-rdbms==1.4.1 # via -r /awx_devel/requirements/requirements_ansible.in azure-mgmt-redis==5.0.0 # via -r /awx_devel/requirements/requirements_ansible.in azure-mgmt-resource==2.1.0 # via -r /awx_devel/requirements/requirements_ansible.in From c4d9b81c55d273309d290b8cb218d5a561e73624 Mon Sep 17 00:00:00 2001 From: mabashian Date: Mon, 18 May 2020 13:23:19 -0400 Subject: [PATCH 002/494] Fixes bug where all_parents_must_converge was not being set for new or existing approval nodes. 
--- .../workflow-maker/workflow-maker.controller.js | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/awx/ui/client/src/templates/workflows/workflow-maker/workflow-maker.controller.js b/awx/ui/client/src/templates/workflows/workflow-maker/workflow-maker.controller.js index 96cc1ad7e4..d701d220d6 100644 --- a/awx/ui/client/src/templates/workflows/workflow-maker/workflow-maker.controller.js +++ b/awx/ui/client/src/templates/workflows/workflow-maker/workflow-maker.controller.js @@ -148,11 +148,14 @@ export default ['$scope', 'TemplatesService', Object.keys(nodeRef).map((workflowMakerNodeId) => { const node = nodeRef[workflowMakerNodeId]; + const all_parents_must_converge = _.get(node, 'all_parents_must_converge', false); if (node.isNew) { if (node.unifiedJobTemplate && node.unifiedJobTemplate.unified_job_type === "workflow_approval") { addPromises.push(TemplatesService.addWorkflowNode({ url: $scope.workflowJobTemplateObj.related.workflow_nodes, - data: {} + data: { + all_parents_must_converge + } }).then(({data: newNodeData}) => { Rest.setUrl(newNodeData.related.create_approval_template); approvalTemplatePromises.push(Rest.post({ @@ -234,6 +237,14 @@ export default ['$scope', 'TemplatesService', }); })); } + if (node.originalNodeObject.all_parents_must_converge !== all_parents_must_converge) { + editPromises.push(TemplatesService.editWorkflowNode({ + id: node.originalNodeObject.id, + data: { + all_parents_must_converge + } + })); + } } else { editPromises.push(TemplatesService.editWorkflowNode({ id: node.originalNodeObject.id, From b83db0500fcc541cb4f627a1c8d7d395799e5752 Mon Sep 17 00:00:00 2001 From: Jake McDermott Date: Tue, 19 May 2020 10:52:09 -0400 Subject: [PATCH 003/494] Enable management job notications for admins --- .../notifications/notification.controller.js | 6 ++++-- .../notifications/notification.route.js | 13 +++++++++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/awx/ui/client/src/management-jobs/notifications/notification.controller.js b/awx/ui/client/src/management-jobs/notifications/notification.controller.js index 62e8dff8a3..cc3b0b74be 100644 --- a/awx/ui/client/src/management-jobs/notifications/notification.controller.js +++ b/awx/ui/client/src/management-jobs/notifications/notification.controller.js @@ -6,10 +6,10 @@ export default [ 'NotificationsList', 'GetBasePath', 'ToggleNotification', 'NotificationsListInit', - '$stateParams', 'Dataset', '$scope', + '$stateParams', 'Dataset', '$scope', 'isAdmin', function( NotificationsList, GetBasePath, ToggleNotification, NotificationsListInit, - $stateParams, Dataset, $scope) { + $stateParams, Dataset, $scope, isAdmin) { var defaultUrl = GetBasePath('system_job_templates'), list = NotificationsList, id = $stateParams.management_id; @@ -19,6 +19,8 @@ export default $scope[`${list.iterator}_dataset`] = Dataset.data; $scope[list.name] = $scope[`${list.iterator}_dataset`].results; + $scope.sufficientRoleForNotifToggle = isAdmin; + NotificationsListInit({ scope: $scope, url: defaultUrl, diff --git a/awx/ui/client/src/management-jobs/notifications/notification.route.js b/awx/ui/client/src/management-jobs/notifications/notification.route.js index a6b1f30d8a..7b6bf64585 100644 --- a/awx/ui/client/src/management-jobs/notifications/notification.route.js +++ b/awx/ui/client/src/management-jobs/notifications/notification.route.js @@ -39,6 +39,19 @@ export default { let path = `${GetBasePath('system_job_templates')}${$stateParams.management_id}`; Rest.setUrl(path); return 
Rest.get(path).then((res) => res.data); + }], + isAdmin: ['Rest', 'GetBasePath', function(Rest, GetBasePath) { + Rest.setUrl(GetBasePath('me')); + return Rest.get() + .then((res) => { + if (res.data && res.data.results && res.data.count && res.data.results[0] && res.data.results[0].is_superuser) { + return true; + } + return false; + }) + .catch(() => { + return false; + }); }] }, ncyBreadcrumb: { From b6d3c3c1a3f36c256a9bbff967701efb85039354 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Mon, 11 May 2020 16:58:13 -0400 Subject: [PATCH 004/494] drastically optimize job host summary creation see: https://github.com/ansible/awx/issues/6991 --- awx/main/models/events.py | 33 +++++++------ awx/main/tasks.py | 2 + .../tests/functional/models/test_events.py | 48 ++++++++++++++++++- 3 files changed, 67 insertions(+), 16 deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index d5cd2d76e7..80d530a2c3 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -7,7 +7,7 @@ from collections import defaultdict from django.db import models, DatabaseError, connection from django.utils.dateparse import parse_datetime from django.utils.text import Truncator -from django.utils.timezone import utc +from django.utils.timezone import utc, now from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import force_text @@ -407,11 +407,14 @@ class BasePlaybookEvent(CreatedModifiedModel): except (KeyError, ValueError): kwargs.pop('created', None) + host_map = kwargs.pop('host_map', {}) + sanitize_event_keys(kwargs, cls.VALID_KEYS) workflow_job_id = kwargs.pop('workflow_job_id', None) event = cls(**kwargs) if workflow_job_id: setattr(event, 'workflow_job_id', workflow_job_id) + setattr(event, 'host_map', host_map) event._update_from_event_data() return event @@ -484,8 +487,10 @@ class JobEvent(BasePlaybookEvent): if not self.job or not self.job.inventory: logger.info('Event {} missing job or inventory, host summaries not updated'.format(self.pk)) return - qs = self.job.inventory.hosts.filter(name__in=hostnames) job = self.job + + from awx.main.models.jobs import JobHostSummary # circular import + summaries = dict() for host in hostnames: host_stats = {} for stat in ('changed', 'dark', 'failures', 'ignored', 'ok', 'processed', 'rescued', 'skipped'): @@ -493,20 +498,18 @@ class JobEvent(BasePlaybookEvent): host_stats[stat] = self.event_data.get(stat, {}).get(host, 0) except AttributeError: # in case event_data[stat] isn't a dict. 
pass - if qs.filter(name=host).exists(): - host_actual = qs.get(name=host) - host_summary, created = job.job_host_summaries.get_or_create(host=host_actual, host_name=host_actual.name, defaults=host_stats) - else: - host_summary, created = job.job_host_summaries.get_or_create(host_name=host, defaults=host_stats) + host_id = self.host_map.get(host, None) + summaries.setdefault( + (host_id, host), + JobHostSummary(created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host) + ) + host_summary = summaries[(host_id, host)] - if not created: - update_fields = [] - for stat, value in host_stats.items(): - if getattr(host_summary, stat) != value: - setattr(host_summary, stat, value) - update_fields.append(stat) - if update_fields: - host_summary.save(update_fields=update_fields) + for stat, value in host_stats.items(): + if getattr(host_summary, stat) != value: + setattr(host_summary, stat, value) + + JobHostSummary.objects.bulk_create(summaries.values()) @property def job_verbosity(self): diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 3a765364a3..38fd1f5207 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1215,6 +1215,8 @@ class BaseTask(object): else: event_data['host_name'] = '' event_data['host_id'] = '' + if event_data.get('event') == 'playbook_on_stats': + event_data['host_map'] = self.host_map if isinstance(self, RunProjectUpdate): # it's common for Ansible's SCM modules to print diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py index a61c3fda25..0d2530b968 100644 --- a/awx/main/tests/functional/models/test_events.py +++ b/awx/main/tests/functional/models/test_events.py @@ -1,7 +1,9 @@ from unittest import mock import pytest -from awx.main.models import Job, JobEvent +from django.utils.timezone import now + +from awx.main.models import Job, JobEvent, Inventory, Host @pytest.mark.django_db @@ -61,3 +63,47 @@ def test_parent_failed(emit, event): assert events.count() == 2 for e in events.all(): assert e.failed is True + + +@pytest.mark.django_db +def test_host_summary_generation(): + hostnames = [f'Host {i}' for i in range(5000)] + inv = Inventory() + inv.save() + Host.objects.bulk_create([ + Host(created=now(), modified=now(), name=h, inventory_id=inv.id) + for h in hostnames + ]) + j = Job(inventory=inv) + j.save() + host_map = dict((host.name, host.id) for host in inv.hosts.all()) + JobEvent.create_from_data( + job_id=j.pk, + parent_uuid='abc123', + event='playbook_on_stats', + event_data={ + 'ok': dict((hostname, len(hostname)) for hostname in hostnames), + 'changed': {}, + 'dark': {}, + 'failures': {}, + 'ignored': {}, + 'processed': {}, + 'rescued': {}, + 'skipped': {}, + }, + host_map=host_map + ).save() + + assert j.job_host_summaries.count() == len(hostnames) + assert sorted([s.host_name for s in j.job_host_summaries.all()]) == sorted(hostnames) + + for s in j.job_host_summaries.all(): + assert host_map[s.host_name] == s.host_id + assert s.ok == len(s.host_name) + assert s.changed == 0 + assert s.dark == 0 + assert s.failures == 0 + assert s.ignored == 0 + assert s.processed == 0 + assert s.rescued == 0 + assert s.skipped == 0 From 3ea642f212f4a88438e0efba020185a60454c885 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 12 May 2020 09:38:59 -0400 Subject: [PATCH 005/494] properly handle host summary bulk updates if hosts go missing --- awx/main/models/events.py | 18 ++++---- .../tests/functional/models/test_events.py | 44 ++++++++++++++++++- 2 files changed, 51 insertions(+), 11 
deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index 80d530a2c3..57de629a8f 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -489,25 +489,23 @@ class JobEvent(BasePlaybookEvent): return job = self.job - from awx.main.models.jobs import JobHostSummary # circular import + from awx.main.models import Host, JobHostSummary # circular import + existing = Host.objects.filter(id__in=self.host_map.values()).values_list('id', flat=True) + summaries = dict() for host in hostnames: + host_id = self.host_map.get(host, None) + if host_id not in existing: + host_id = None host_stats = {} for stat in ('changed', 'dark', 'failures', 'ignored', 'ok', 'processed', 'rescued', 'skipped'): try: host_stats[stat] = self.event_data.get(stat, {}).get(host, 0) except AttributeError: # in case event_data[stat] isn't a dict. pass - host_id = self.host_map.get(host, None) - summaries.setdefault( - (host_id, host), - JobHostSummary(created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host) + summaries[(host_id, host)] = JobHostSummary( + created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host, **host_stats ) - host_summary = summaries[(host_id, host)] - - for stat, value in host_stats.items(): - if getattr(host_summary, stat) != value: - setattr(host_summary, stat, value) JobHostSummary.objects.bulk_create(summaries.values()) diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py index 0d2530b968..6dd0cac06a 100644 --- a/awx/main/tests/functional/models/test_events.py +++ b/awx/main/tests/functional/models/test_events.py @@ -67,7 +67,7 @@ def test_parent_failed(emit, event): @pytest.mark.django_db def test_host_summary_generation(): - hostnames = [f'Host {i}' for i in range(5000)] + hostnames = [f'Host {i}' for i in range(500)] inv = Inventory() inv.save() Host.objects.bulk_create([ @@ -107,3 +107,45 @@ def test_host_summary_generation(): assert s.processed == 0 assert s.rescued == 0 assert s.skipped == 0 + + +@pytest.mark.django_db +def test_host_summary_generation_with_deleted_hosts(): + hostnames = [f'Host {i}' for i in range(10)] + inv = Inventory() + inv.save() + Host.objects.bulk_create([ + Host(created=now(), modified=now(), name=h, inventory_id=inv.id) + for h in hostnames + ]) + j = Job(inventory=inv) + j.save() + host_map = dict((host.name, host.id) for host in inv.hosts.all()) + + # delete half of the hosts during the playbook run + for h in inv.hosts.all()[:5]: + h.delete() + + JobEvent.create_from_data( + job_id=j.pk, + parent_uuid='abc123', + event='playbook_on_stats', + event_data={ + 'ok': dict((hostname, len(hostname)) for hostname in hostnames), + 'changed': {}, + 'dark': {}, + 'failures': {}, + 'ignored': {}, + 'processed': {}, + 'rescued': {}, + 'skipped': {}, + }, + host_map=host_map + ).save() + + + ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()]) + names = sorted([s.host_name for s in j.job_host_summaries.all()]) + assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10] + assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', + 'Host 6', 'Host 7', 'Host 8', 'Host 9'] From 59d457207c8832e7c369a5112d6fb9b2c2b8c7be Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 12 May 2020 14:46:57 -0400 Subject: [PATCH 006/494] properly update .failed, .last_job_id, and last_job_host_summary --- awx/main/models/events.py | 20 ++++++++++++++++++- awx/main/models/jobs.py | 14 ------------- 
.../tests/functional/models/test_events.py | 6 +++++- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index 57de629a8f..e8d87253eb 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -503,12 +503,30 @@ class JobEvent(BasePlaybookEvent): host_stats[stat] = self.event_data.get(stat, {}).get(host, 0) except AttributeError: # in case event_data[stat] isn't a dict. pass - summaries[(host_id, host)] = JobHostSummary( + summary = JobHostSummary( created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host, **host_stats ) + summary.failed = bool(summary.dark or summary.failures) + summaries[(host_id, host)] = summary JobHostSummary.objects.bulk_create(summaries.values()) + # update the last_job_id and last_job_host_summary_id + # in single queries + host_mapping = dict( + (summary['host'], summary['id']) + for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host') + ) + all_hosts = Host.objects.filter( + pk__in=host_mapping.keys() + ).only('id') + for h in all_hosts: + h.last_job_id = job.id + if h.id in host_mapping: + h.last_job_host_summary_id = host_mapping[h.id] + Host.objects.bulk_update(all_hosts, ['last_job_id', 'last_job_host_summary_id']) + + @property def job_verbosity(self): return self.job.verbosity diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index e67478a8e8..45f89d4f9c 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -1133,20 +1133,6 @@ class JobHostSummary(CreatedModifiedModel): self.failed = bool(self.dark or self.failures) update_fields.append('failed') super(JobHostSummary, self).save(*args, **kwargs) - self.update_host_last_job_summary() - - def update_host_last_job_summary(self): - update_fields = [] - if self.host is None: - return - if self.host.last_job_id != self.job_id: - self.host.last_job_id = self.job_id - update_fields.append('last_job_id') - if self.host.last_job_host_summary_id != self.id: - self.host.last_job_host_summary_id = self.id - update_fields.append('last_job_host_summary_id') - if update_fields: - self.host.save(update_fields=update_fields) class SystemJobOptions(BaseModel): diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py index 6dd0cac06a..7f881a2fea 100644 --- a/awx/main/tests/functional/models/test_events.py +++ b/awx/main/tests/functional/models/test_events.py @@ -67,7 +67,7 @@ def test_parent_failed(emit, event): @pytest.mark.django_db def test_host_summary_generation(): - hostnames = [f'Host {i}' for i in range(500)] + hostnames = [f'Host {i}' for i in range(100)] inv = Inventory() inv.save() Host.objects.bulk_create([ @@ -108,6 +108,10 @@ def test_host_summary_generation(): assert s.rescued == 0 assert s.skipped == 0 + for host in Host.objects.all(): + assert host.last_job_id == j.id + assert host.last_job_host_summary.host == host + @pytest.mark.django_db def test_host_summary_generation_with_deleted_hosts(): From d85df2e4a1567a25ca5f783ca0f2407052577333 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 12 May 2020 16:20:22 -0400 Subject: [PATCH 007/494] further optimize job host summary queries --- awx/main/models/events.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index e8d87253eb..ac33a311f4 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -490,12 +490,15 @@ class JobEvent(BasePlaybookEvent): 
job = self.job from awx.main.models import Host, JobHostSummary # circular import - existing = Host.objects.filter(id__in=self.host_map.values()).values_list('id', flat=True) + all_hosts = Host.objects.filter( + pk__in=self.host_map.values() + ).only('id') + existing_host_ids = set(h.id for h in all_hosts) summaries = dict() for host in hostnames: host_id = self.host_map.get(host, None) - if host_id not in existing: + if host_id not in existing_host_ids: host_id = None host_stats = {} for stat in ('changed', 'dark', 'failures', 'ignored', 'ok', 'processed', 'rescued', 'skipped'): @@ -514,12 +517,9 @@ class JobEvent(BasePlaybookEvent): # update the last_job_id and last_job_host_summary_id # in single queries host_mapping = dict( - (summary['host'], summary['id']) - for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host') + (summary['host_id'], summary['id']) + for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host_id') ) - all_hosts = Host.objects.filter( - pk__in=host_mapping.keys() - ).only('id') for h in all_hosts: h.last_job_id = job.id if h.id in host_mapping: From 71257c18c202a004b8bbc21ba9286a3f41ce5df5 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Wed, 20 May 2020 16:30:44 -0400 Subject: [PATCH 008/494] Revert "follow symlinks while discovering valid playbooks" This reverts commit 3dd21d720eb5351e63f9f31c4e601a67965dac56. --- awx/main/models/projects.py | 2 +- awx/main/utils/ansible.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 38ca20ab06..0207fec97b 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -199,7 +199,7 @@ class ProjectOptions(models.Model): results = [] project_path = self.get_project_path() if project_path: - for dirpath, dirnames, filenames in os.walk(smart_str(project_path), followlinks=True): + for dirpath, dirnames, filenames in os.walk(smart_str(project_path)): if skip_directory(dirpath): continue for filename in filenames: diff --git a/awx/main/utils/ansible.py b/awx/main/utils/ansible.py index eda6d6d214..18011504b9 100644 --- a/awx/main/utils/ansible.py +++ b/awx/main/utils/ansible.py @@ -64,7 +64,6 @@ def could_be_playbook(project_path, dir_path, filename): matched = True break except IOError: - logger.exception(f'failed to open {playbook_path}') return None if not matched: return None From 4c499b2d80e34251439640cb149ef4af96db6f7f Mon Sep 17 00:00:00 2001 From: Bill Nottingham Date: Wed, 20 May 2020 11:03:13 -0400 Subject: [PATCH 009/494] Always check configuration before gathering data. We shouldn't perform expensive operations if we won't be able to send it. Only log at debug level, otherwise every node will log this every 5 minutes. 
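The guard added by this patch short-circuits before any collection work is done. As a standalone illustration only (the setting names and log message are taken from the diff below; the rest of gather_analytics() is elided, and the logger name is an assumption):

    # Sketch: bail out of the periodic analytics task before doing anything
    # expensive if the upload target or credentials are not configured.
    import logging
    from django.conf import settings

    logger = logging.getLogger('awx.main.tasks')   # assumed logger name

    def gather_analytics():
        if not settings.INSIGHTS_TRACKING_STATE:
            return
        if not (settings.AUTOMATION_ANALYTICS_URL and
                settings.REDHAT_USERNAME and
                settings.REDHAT_PASSWORD):
            # debug level only -- this task runs periodically on every node,
            # so a louder level would flood the logs
            logger.debug('Not gathering analytics, configuration is invalid')
            return
        ...  # gather, package, and ship the analytics payload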
--- awx/main/tasks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 3a765364a3..a1c03df658 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -358,6 +358,9 @@ def gather_analytics(): from rest_framework.fields import DateTimeField if not settings.INSIGHTS_TRACKING_STATE: return + if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD): + logger.debug('Not gathering analytics, configuration is invalid') + return last_gather = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_GATHER').first() if last_gather: last_time = DateTimeField().to_internal_value(last_gather.value) From 492d01ff3b9be0eb48bd00bc7bd204fd5c1d95a6 Mon Sep 17 00:00:00 2001 From: Gabe Muniz Date: Mon, 11 May 2020 17:10:01 -0400 Subject: [PATCH 010/494] added try/except to virtual env --- awx/main/utils/common.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 0eb8741d9a..2a634b132e 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -1010,14 +1010,18 @@ def get_custom_venv_choices(custom_paths=None): custom_venv_choices = [] for custom_venv_path in all_venv_paths: - if os.path.exists(custom_venv_path): - custom_venv_choices.extend([ - os.path.join(custom_venv_path, x, '') - for x in os.listdir(custom_venv_path) - if x != 'awx' and - os.path.isdir(os.path.join(custom_venv_path, x)) and - os.path.exists(os.path.join(custom_venv_path, x, 'bin', 'activate')) - ]) + try: + if os.path.exists(custom_venv_path): + custom_venv_choices.extend([ + os.path.join(custom_venv_path, x, '') + for x in os.listdir(custom_venv_path) + if x != 'awx' and + os.path.isdir(os.path.join(custom_venv_path, x)) and + os.path.exists(os.path.join(custom_venv_path, x, 'bin', 'activate')) + ]) + except Exception: + logger.exception("Encountered an error while discovering custom virtual environments.") + pass return custom_venv_choices From 563d3944ed68cf66c3eb876d5ef1bb48e973c13b Mon Sep 17 00:00:00 2001 From: gamuniz Date: Mon, 11 May 2020 18:02:25 -0400 Subject: [PATCH 011/494] removed pass per feedback --- awx/main/utils/common.py | 1 - 1 file changed, 1 deletion(-) diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 2a634b132e..d3e9d61f65 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -1021,7 +1021,6 @@ def get_custom_venv_choices(custom_paths=None): ]) except Exception: logger.exception("Encountered an error while discovering custom virtual environments.") - pass return custom_venv_choices From aec7d3cc939d0d8beb4eb65609bf8f7352fb89d2 Mon Sep 17 00:00:00 2001 From: Christian Adams Date: Tue, 26 May 2020 23:37:30 -0400 Subject: [PATCH 012/494] Correctly parse sumologic url paths - Sumologic includes a token with a '==' at the end of it's host path. This adds rsyslog conf parsing tests and does not escape equals signs. 
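The fix in the hunks below hinges on passing safe='/=' to urllib.parse.quote so the '==' at the end of a Sumologic collector token survives path quoting. A quick standalone illustration (the collector URL here is made up, not the one used in the test below):

    import urllib.parse as urlparse

    parsed = urlparse.urlparse(
        'https://collectors.example.sumologic.com/receiver/v1/http/ABCDEF==')

    # default quoting percent-encodes '=' and corrupts the token
    print(urlparse.quote(parsed.path[1:]))              # receiver/v1/http/ABCDEF%3D%3D
    # treating '=' as safe keeps the token intact, which is what the patch does
    print(urlparse.quote(parsed.path[1:], safe='/='))   # receiver/v1/http/ABCDEF==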
--- awx/main/tests/unit/api/test_logger.py | 17 ++++++++++++++--- awx/main/utils/external_logging.py | 2 +- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/awx/main/tests/unit/api/test_logger.py b/awx/main/tests/unit/api/test_logger.py index 2a0bb9856d..95ee8f9a98 100644 --- a/awx/main/tests/unit/api/test_logger.py +++ b/awx/main/tests/unit/api/test_logger.py @@ -35,7 +35,7 @@ data_loggly = { # Test reconfigure logging settings function # name this whatever you want @pytest.mark.parametrize( - 'enabled, type, host, port, protocol, expected_config', [ + 'enabled, log_type, host, port, protocol, expected_config', [ ( True, 'loggly', @@ -135,9 +135,20 @@ data_loggly = { 'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa ]) ), + ( + True, # valid sumologic config + 'sumologic', + 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==', # noqa + None, + 'https', + '\n'.join([ + 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")', + 'action(type="omhttp" server="endpoint5.collection.us2.sumologic.com" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==")', # noqa + ]) + ), ] ) -def test_rsyslog_conf_template(enabled, type, host, port, protocol, expected_config): +def test_rsyslog_conf_template(enabled, log_type, host, port, protocol, expected_config): mock_settings, _ = _mock_logging_defaults() @@ -146,7 +157,7 @@ def test_rsyslog_conf_template(enabled, type, host, port, protocol, expected_con setattr(mock_settings, 'LOGGING', logging_defaults) setattr(mock_settings, 'LOGGING["handlers"]["external_logger"]["address"]', '/var/run/awx-rsyslog/rsyslog.sock') setattr(mock_settings, 'LOG_AGGREGATOR_ENABLED', enabled) - setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', type) + setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', log_type) setattr(mock_settings, 'LOG_AGGREGATOR_HOST', host) if port: setattr(mock_settings, 'LOG_AGGREGATOR_PORT', port) diff --git a/awx/main/utils/external_logging.py b/awx/main/utils/external_logging.py index 743e0f45ab..ac9d2834c8 100644 --- a/awx/main/utils/external_logging.py +++ b/awx/main/utils/external_logging.py @@ -78,7 +78,7 @@ def construct_rsyslog_conf_template(settings=settings): f'action.resumeInterval="{timeout}"' ] if parsed.path: - path = urlparse.quote(parsed.path[1:]) + path = urlparse.quote(parsed.path[1:], safe='/=') if parsed.query: path = f'{path}?{urlparse.quote(parsed.query)}' params.append(f'restpath="{path}"') From b13a175668e8d120046462f08f6a78b43ddc05cf Mon Sep 17 00:00:00 2001 From: mosad Date: Fri, 29 May 2020 12:35:45 +0200 Subject: [PATCH 013/494] Reshape security context for AWX containers --- installer/roles/kubernetes/defaults/main.yml | 10 +++++++ .../kubernetes/templates/deployment.yml.j2 | 30 ++++++++++++++++++- 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/installer/roles/kubernetes/defaults/main.yml b/installer/roles/kubernetes/defaults/main.yml 
index 701a659ad7..3bb3aacbf4 100644 --- a/installer/roles/kubernetes/defaults/main.yml +++ b/installer/roles/kubernetes/defaults/main.yml @@ -16,12 +16,19 @@ kubernetes_web_image: "{{ tower_package_name | default('ansible/awx_web') }}" web_mem_request: 1 web_cpu_request: 500 +web_security_context_enabled: true +web_security_context_privileged: false task_mem_request: 2 task_cpu_request: 1500 +task_security_context_enabled: true +task_security_context_privileged: false redis_mem_request: 2 redis_cpu_request: 500 +redis_security_context_enabled: true +redis_security_context_privileged: false +redis_security_context_user: 1001 kubernetes_redis_image: "redis" kubernetes_redis_image_tag: "latest" @@ -29,6 +36,9 @@ kubernetes_redis_config_mount_path: "/usr/local/etc/redis/redis.conf" memcached_mem_request: 1 memcached_cpu_request: 500 +memcached_security_context_enabled: true +memcached_security_context_privileged: false +memcached_security_context_user: 1001 kubernetes_memcached_version: "latest" kubernetes_memcached_image: "memcached" diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index f5cf12cb99..9334a44a50 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -89,6 +89,12 @@ spec: {% endif %} containers: - name: {{ kubernetes_deployment_name }}-web +{% if web_security_context_enabled is defined and web_security_context_enabled | bool %} + securityContext: +{% if web_security_context_privileged is defined %} + privileged: {{ web_security_context_privileged }} +{% endif %} +{% endif %} image: "{{ kubernetes_web_image }}:{{ kubernetes_web_version }}" imagePullPolicy: Always ports: @@ -178,8 +184,12 @@ spec: cpu: "{{ web_cpu_limit }}m" {% endif %} - name: {{ kubernetes_deployment_name }}-task +{% if task_security_context_enabled is defined and task_security_context_enabled | bool %} securityContext: - privileged: true +{% if task_security_context_privileged is defined %} + privileged: {{ task_security_context_privileged }} +{% endif %} +{% endif %} image: "{{ kubernetes_task_image }}:{{ kubernetes_task_version }}" command: - /usr/bin/launch_awx_task.sh @@ -270,6 +280,15 @@ spec: cpu: "{{ task_cpu_limit }}m" {% endif %} - name: {{ kubernetes_deployment_name }}-redis +{% if redis_security_context_enabled is defined and redis_security_context_enabled | bool %} + securityContext: +{% if redis_security_context_privileged is defined %} + privileged: {{ redis_security_context_privileged }} +{% endif %} +{% if redis_security_context_user is defined %} + runAsUser: {{ redis_security_context_user }} +{% endif %} +{% endif %} image: {{ kubernetes_redis_image }}:{{ kubernetes_redis_image_tag }} imagePullPolicy: Always args: ["redis-server", "{{ kubernetes_redis_config_mount_path }}"] @@ -295,6 +314,15 @@ spec: cpu: "{{ redis_cpu_limit }}m" {% endif %} - name: {{ kubernetes_deployment_name }}-memcached +{% if memcached_security_context_enabled is defined and memcached_security_context_enabled | bool %} + securityContext: +{% if memcached_security_context_privileged is defined %} + privileged: {{ memcached_security_context_privileged }} +{% endif %} +{% if memcached_security_context_user is defined %} + runAsUser: {{ memcached_security_context_user }} +{% endif %} +{% endif %} image: "{{ kubernetes_memcached_image }}:{{ kubernetes_memcached_version }}" imagePullPolicy: Always command: From 4a9603a7ea10163f980b355dda510dafb4c062dc Mon Sep 17 00:00:00 2001 
From: mosad Date: Fri, 29 May 2020 20:24:49 +0200 Subject: [PATCH 014/494] Allow priv container for awx_task and option to create psp --- installer/roles/kubernetes/defaults/main.yml | 6 +- .../kubernetes/templates/deployment.yml.j2 | 64 +++++++++++++++++++ 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/installer/roles/kubernetes/defaults/main.yml b/installer/roles/kubernetes/defaults/main.yml index 3bb3aacbf4..659c4c04bb 100644 --- a/installer/roles/kubernetes/defaults/main.yml +++ b/installer/roles/kubernetes/defaults/main.yml @@ -14,6 +14,10 @@ kubernetes_task_image: "{{ tower_package_name | default('ansible/awx_task') }}" kubernetes_web_version: "{{ tower_package_version | default(dockerhub_version) }}" kubernetes_web_image: "{{ tower_package_name | default('ansible/awx_web') }}" +awx_psp_create: false +awx_psp_name: 'awx' +awx_psp_privileged: true + web_mem_request: 1 web_cpu_request: 500 web_security_context_enabled: true @@ -22,7 +26,7 @@ web_security_context_privileged: false task_mem_request: 2 task_cpu_request: 1500 task_security_context_enabled: true -task_security_context_privileged: false +task_security_context_privileged: true redis_mem_request: 2 redis_cpu_request: 500 diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index 9334a44a50..243b235c91 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -15,6 +15,70 @@ imagePullSecrets: - name: "{{ kubernetes_image_pull_secrets }}" {% endif %} +{% if awx_psp_create is defined and awx_psp_create | bool %} +--- +apiVersion: policy/v1beta1 +kind: PodSecurityPolicy +metadata: + name: {{ awx_psp_name }}-psp +spec: +{% if awx_psp_privileged is defined %} + privileged: {{ awx_psp_privileged }} + allowPrivilegeEscalation: {{ awx_psp_privileged }} +{% endif %} + requiredDropCapabilities: + - ALL + volumes: + - 'configMap' + - 'emptyDir' + - 'projected' + - 'secret' + - 'downwardAPI' + - 'persistentVolumeClaim' + hostNetwork: false + hostIPC: false + hostPID: false + runAsUser: + rule: 'MustRunAsNonRoot' + seLinux: + rule: 'RunAsAny' + supplementalGroups: + rule: 'RunAsAny' + fsGroup: + rule: 'RunAsAny' + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + namespace: {{ kubernetes_namespace }} + name: {{ awx_psp_name }}-role +rules: +- apiGroups: + - policy + resources: + - podsecuritypolicies + resourceNames: + - {{ awx_psp_name }}-psp + verbs: + - use + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ awx_psp_name }}-role-binding + namespace: {{ kubernetes_namespace }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: {{ awx_psp_name }}-role +subjects: +- kind: ServiceAccount + name: awx + namespace: {{ kubernetes_namespace }} +{% endif %} + --- apiVersion: {{ kubernetes_deployment_api_version }} kind: Deployment From c48da1b3845b2a2c5367f2f03876394f827ab0ec Mon Sep 17 00:00:00 2001 From: Christian Adams Date: Tue, 26 May 2020 11:38:25 -0400 Subject: [PATCH 015/494] allow org admins to remove labels --- awx/main/access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/access.py b/awx/main/access.py index f1bbc42683..4705fb2cfc 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -495,7 +495,7 @@ class NotificationAttachMixin(BaseAccess): # due to this special case, we use symmetrical logic with attach permission return 
self._can_attach(notification_template=sub_obj, resource_obj=obj) return super(NotificationAttachMixin, self).can_unattach( - obj, sub_obj, relationship, relationship, data=data + obj, sub_obj, relationship, data=data ) From 85426f76a5a37885f331a794c9a000f3c944ccbc Mon Sep 17 00:00:00 2001 From: beeankha Date: Thu, 14 May 2020 15:43:50 -0400 Subject: [PATCH 016/494] Fix misc. linter errors due to the flake8-3.8.1 release - [Ref] https://flake8.pycqa.org/en/latest/release-notes/ --- awx/asgi.py | 3 +-- awx/main/management/commands/callback_stats.py | 4 ++-- awx/main/models/inventory.py | 12 ++++++------ awx/main/scheduler/dag_simple.py | 8 ++++---- awx/main/signals.py | 8 ++++---- .../inventory/plugins/satellite6/files/foreman.yml | 10 +++++----- awx/main/tests/factories/tower.py | 8 ++++---- .../tests/functional/analytics/test_collectors.py | 6 +++--- .../commands/test_secret_key_regeneration.py | 1 + awx/main/tests/functional/test_dispatch.py | 2 ++ awx/settings/defaults.py | 2 +- 11 files changed, 33 insertions(+), 31 deletions(-) diff --git a/awx/asgi.py b/awx/asgi.py index 40640a4a19..698c5f7533 100644 --- a/awx/asgi.py +++ b/awx/asgi.py @@ -4,12 +4,11 @@ import os import logging import django from awx import __version__ as tower_version - # Prepare the AWX environment. from awx import prepare_env, MODE +from channels.routing import get_default_application # noqa prepare_env() # NOQA -from channels.routing import get_default_application """ diff --git a/awx/main/management/commands/callback_stats.py b/awx/main/management/commands/callback_stats.py index 74b7815b91..0a61089607 100644 --- a/awx/main/management/commands/callback_stats.py +++ b/awx/main/management/commands/callback_stats.py @@ -34,7 +34,7 @@ class Command(BaseCommand): if clear: for i in range(12): sys.stdout.write('\x1b[1A\x1b[2K') - for l in lines: - print(l) + for line in lines: + print(line) clear = True time.sleep(.25) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 6e5ffce508..0bf553d385 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -2624,22 +2624,22 @@ class satellite6(PluginFileInjector): "environment": {"prefix": "foreman_environment_", "separator": "", "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " - "regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '')"}, # NOQA: W605 + "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')"}, "location": {"prefix": "foreman_location_", "separator": "", - "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"}, + "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, "organization": {"prefix": "foreman_organization_", "separator": "", - "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"}, + "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, "lifecycle_environment": {"prefix": "foreman_lifecycle_environment_", "separator": "", "key": "foreman['content_facet_attributes']['lifecycle_environment_name'] | " - "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"}, + "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, "content_view": {"prefix": "foreman_content_view_", "separator": "", "key": "foreman['content_facet_attributes']['content_view_name'] | " - "lower | regex_replace(' ', '') | 
regex_replace('[^A-Za-z0-9\_]', '_')"} - } + "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"} + } ret['legacy_hostvars'] = True # convert hostvar structure to the form used by the script ret['want_params'] = True diff --git a/awx/main/scheduler/dag_simple.py b/awx/main/scheduler/dag_simple.py index 065983577a..5a354edbba 100644 --- a/awx/main/scheduler/dag_simple.py +++ b/awx/main/scheduler/dag_simple.py @@ -152,8 +152,8 @@ class SimpleDAG(object): return self._get_children_by_label(this_ord, label) else: nodes = [] - for l in self.node_from_edges_by_label.keys(): - nodes.extend(self._get_children_by_label(this_ord, l)) + for label_obj in self.node_from_edges_by_label.keys(): + nodes.extend(self._get_children_by_label(this_ord, label_obj)) return nodes def _get_parents_by_label(self, node_index, label): @@ -168,8 +168,8 @@ class SimpleDAG(object): return self._get_parents_by_label(this_ord, label) else: nodes = [] - for l in self.node_to_edges_by_label.keys(): - nodes.extend(self._get_parents_by_label(this_ord, l)) + for label_obj in self.node_to_edges_by_label.keys(): + nodes.extend(self._get_parents_by_label(this_ord, label_obj)) return nodes def get_root_nodes(self): diff --git a/awx/main/signals.py b/awx/main/signals.py index dece9f49d6..adfbc65d01 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -150,9 +150,9 @@ def rbac_activity_stream(instance, sender, **kwargs): def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs): - for l in instance.labels.all(): - if l.is_candidate_for_detach(): - l.delete() + for label in instance.labels.all(): + if label.is_candidate_for_detach(): + label.delete() def save_related_job_templates(sender, instance, **kwargs): @@ -393,7 +393,7 @@ def activity_stream_create(sender, instance, created, **kwargs): '{} ({})'.format(c.name, c.id) for c in instance.credentials.iterator() ] - changes['labels'] = [l.name for l in instance.labels.iterator()] + changes['labels'] = [label.name for label in instance.labels.iterator()] if 'extra_vars' in changes: changes['extra_vars'] = instance.display_extra_vars() if type(instance) == OAuth2AccessToken: diff --git a/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml index a63520c2f0..11d6f67220 100644 --- a/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml +++ b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml @@ -3,19 +3,19 @@ compose: ansible_ssh_host: foreman['ip6'] | default(foreman['ip'], true) group_prefix: foo_group_prefix keyed_groups: -- key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '') +- key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '') prefix: foreman_environment_ separator: '' -- key: foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') +- key: foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') prefix: foreman_location_ separator: '' -- key: foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') +- key: foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') prefix: foreman_organization_ separator: '' -- key: foreman['content_facet_attributes']['lifecycle_environment_name'] | lower | regex_replace(' ', 
'') | regex_replace('[^A-Za-z0-9\_]', '_') +- key: foreman['content_facet_attributes']['lifecycle_environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') prefix: foreman_lifecycle_environment_ separator: '' -- key: foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') +- key: foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') prefix: foreman_content_view_ separator: '' - key: '"%s-%s-%s" | format(app, tier, color)' diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 1da5f126f9..87a7b436eb 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -319,11 +319,11 @@ def create_organization(name, roles=None, persisted=True, **kwargs): users = generate_users(org, teams, False, persisted, users=kwargs.get('users')) if 'labels' in kwargs: - for l in kwargs['labels']: - if type(l) is Label: - labels[l.name] = l + for label_obj in kwargs['labels']: + if type(label_obj) is Label: + labels[label_obj.name] = label_obj else: - labels[l] = mk_label(l, organization=org, persisted=persisted) + labels[label_obj] = mk_label(label_obj, organization=org, persisted=persisted) if 'notification_templates' in kwargs: for nt in kwargs['notification_templates']: diff --git a/awx/main/tests/functional/analytics/test_collectors.py b/awx/main/tests/functional/analytics/test_collectors.py index 500cd89053..ff53ac6bb4 100644 --- a/awx/main/tests/functional/analytics/test_collectors.py +++ b/awx/main/tests/functional/analytics/test_collectors.py @@ -88,7 +88,7 @@ def test_copy_tables_unified_job_query( with tempfile.TemporaryDirectory() as tmpdir: collectors.copy_tables(time_start, tmpdir, subset="unified_jobs") with open(os.path.join(tmpdir, "unified_jobs_table.csv")) as f: - lines = "".join([l for l in f]) + lines = "".join([line for line in f]) assert project_update_name in lines assert inventory_update_name in lines @@ -139,9 +139,9 @@ def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job): reader = csv.reader(f) # Pop the headers next(reader) - lines = [l for l in reader] + lines = [line for line in reader] - ids = [int(l[0]) for l in lines] + ids = [int(line[0]) for line in lines] assert ids == list( workflow_job.workflow_nodes.all().values_list("id", flat=True) diff --git a/awx/main/tests/functional/commands/test_secret_key_regeneration.py b/awx/main/tests/functional/commands/test_secret_key_regeneration.py index dffaacb866..d27b4329cd 100644 --- a/awx/main/tests/functional/commands/test_secret_key_regeneration.py +++ b/awx/main/tests/functional/commands/test_secret_key_regeneration.py @@ -65,6 +65,7 @@ class TestKeyRegeneration: assert nc['token'].startswith(PREFIX) Slack = nt.CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type] + class TestBackend(Slack): def __init__(self, *args, **kw): diff --git a/awx/main/tests/functional/test_dispatch.py b/awx/main/tests/functional/test_dispatch.py index aa3c42ce26..caf54f0161 100644 --- a/awx/main/tests/functional/test_dispatch.py +++ b/awx/main/tests/functional/test_dispatch.py @@ -18,6 +18,8 @@ from awx.main.dispatch.worker import BaseWorker, TaskWorker ''' Prevent logger. 
calls from triggering database operations ''' + + @pytest.fixture(autouse=True) def _disable_database_settings(mocker): m = mocker.patch('awx.conf.settings.SettingsWrapper.all_supported_settings', new_callable=mock.PropertyMock) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 031945dd6d..57d30bc23f 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -247,7 +247,7 @@ TEMPLATES = [ 'loaders': [( 'django.template.loaders.cached.Loader', ('django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader',), + 'django.template.loaders.app_directories.Loader',), )], 'builtins': ['awx.main.templatetags.swagger'], }, From c53e5bdbcf16e8bd3cd844686998cdd3447aaf29 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Thu, 4 Jun 2020 15:10:14 -0400 Subject: [PATCH 017/494] properly write rsyslog configuration as 0640 see: https://github.com/ansible/tower/issues/4383 --- awx/main/utils/external_logging.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/main/utils/external_logging.py b/awx/main/utils/external_logging.py index 743e0f45ab..3714a94828 100644 --- a/awx/main/utils/external_logging.py +++ b/awx/main/utils/external_logging.py @@ -1,6 +1,6 @@ import os import shutil -import tempfile +import tempfile import urllib.parse as urlparse from django.conf import settings @@ -117,6 +117,7 @@ def reconfigure_rsyslog(): with tempfile.TemporaryDirectory(prefix='rsyslog-conf-') as temp_dir: path = temp_dir + '/rsyslog.conf.temp' with open(path, 'w') as f: + os.chmod(path, 0o640) f.write(tmpl + '\n') shutil.move(path, '/var/lib/awx/rsyslog/rsyslog.conf') supervisor_service_command(command='restart', service='awx-rsyslogd') From 49053d34734cd65b619524c3521d4da431c92960 Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Tue, 2 Jun 2020 14:27:27 -0400 Subject: [PATCH 018/494] Adds Inventory Sources Schedules --- .../src/api/models/InventorySources.js | 5 +- .../src/components/Schedule/Schedule.jsx | 9 +++- .../ScheduleList/ScheduleListItem.jsx | 48 +++++++++---------- .../src/screens/Inventory/Inventories.jsx | 7 ++- .../InventorySource/InventorySource.jsx | 28 ++++++++++- 5 files changed, 68 insertions(+), 29 deletions(-) diff --git a/awx/ui_next/src/api/models/InventorySources.js b/awx/ui_next/src/api/models/InventorySources.js index 292aebf290..8d20076ba8 100644 --- a/awx/ui_next/src/api/models/InventorySources.js +++ b/awx/ui_next/src/api/models/InventorySources.js @@ -1,8 +1,11 @@ import Base from '../Base'; import NotificationsMixin from '../mixins/Notifications.mixin'; import LaunchUpdateMixin from '../mixins/LaunchUpdate.mixin'; +import SchedulesMixin from '../mixins/Schedules.mixin'; -class InventorySources extends LaunchUpdateMixin(NotificationsMixin(Base)) { +class InventorySources extends LaunchUpdateMixin( + NotificationsMixin(SchedulesMixin(Base)) +) { constructor(http) { super(http); this.baseUrl = '/api/v2/inventory_sources/'; diff --git a/awx/ui_next/src/components/Schedule/Schedule.jsx b/awx/ui_next/src/components/Schedule/Schedule.jsx index 6282e50bfb..260655f27c 100644 --- a/awx/ui_next/src/components/Schedule/Schedule.jsx +++ b/awx/ui_next/src/components/Schedule/Schedule.jsx @@ -35,7 +35,6 @@ function Schedule({ i18n, setBreadcrumb, unifiedJobTemplate }) { try { const { data } = await SchedulesAPI.readDetail(scheduleId); setSchedule(data); - setBreadcrumb(unifiedJobTemplate, data); } catch (err) { setContentError(err); } finally { @@ -44,8 +43,14 @@ function Schedule({ i18n, setBreadcrumb, 
unifiedJobTemplate }) { }; loadData(); - }, [location.pathname, scheduleId, unifiedJobTemplate, setBreadcrumb]); + }, [location.pathname, scheduleId]); + useEffect(() => { + if (schedule) { + setBreadcrumb(unifiedJobTemplate, schedule); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [schedule, unifiedJobTemplate]); const tabsArray = [ { name: ( diff --git a/awx/ui_next/src/components/Schedule/ScheduleList/ScheduleListItem.jsx b/awx/ui_next/src/components/Schedule/ScheduleList/ScheduleListItem.jsx index bc04a578d8..0cb8a51eb5 100644 --- a/awx/ui_next/src/components/Schedule/ScheduleList/ScheduleListItem.jsx +++ b/awx/ui_next/src/components/Schedule/ScheduleList/ScheduleListItem.jsx @@ -43,7 +43,7 @@ function ScheduleListItem({ i18n, isSelected, onSelect, schedule }) { switch (schedule.summary_fields.unified_job_template.unified_job_type) { case 'inventory_update': - scheduleBaseUrl = `/inventories/${schedule.summary_fields.inventory.id}/sources/${schedule.summary_fields.unified_job_template.id}/schedules/${schedule.id}`; + scheduleBaseUrl = `/inventories/inventory/${schedule.summary_fields.inventory.id}/sources/${schedule.summary_fields.unified_job_template.id}/schedules/${schedule.id}`; break; case 'job': scheduleBaseUrl = `/templates/job_template/${schedule.summary_fields.unified_job_template.id}/schedules/${schedule.id}`; @@ -98,31 +98,31 @@ function ScheduleListItem({ i18n, isSelected, onSelect, schedule }) { )} , - - - {schedule.summary_fields.user_capabilities.edit ? ( - - - - ) : ( - '' - )} - , ]} /> + + + {schedule.summary_fields.user_capabilities.edit ? ( + + + + ) : ( + '' + )} + ); diff --git a/awx/ui_next/src/screens/Inventory/Inventories.jsx b/awx/ui_next/src/screens/Inventory/Inventories.jsx index c4c18e76cf..9ddf8723c0 100644 --- a/awx/ui_next/src/screens/Inventory/Inventories.jsx +++ b/awx/ui_next/src/screens/Inventory/Inventories.jsx @@ -27,7 +27,7 @@ class Inventories extends Component { }; } - setBreadCrumbConfig = (inventory, nested) => { + setBreadCrumbConfig = (inventory, nested, schedule) => { const { i18n } = this.props; if (!inventory) { return; @@ -80,6 +80,11 @@ class Inventories extends Component { [`${inventorySourcesPath}/${nested?.id}`]: `${nested?.name}`, [`${inventorySourcesPath}/${nested?.id}/details`]: i18n._(t`Details`), [`${inventorySourcesPath}/${nested?.id}/edit`]: i18n._(t`Edit details`), + [`${inventorySourcesPath}/${nested?.id}/schedules`]: i18n._(t`Schedules`), + [`${inventorySourcesPath}/${nested?.id}/schedules/${schedule?.id}`]: `${schedule?.name}`, + [`${inventorySourcesPath}/${nested?.id}/schedules/${schedule?.id}/details`]: i18n._( + t`Schedule Details` + ), }; this.setState({ breadcrumbConfig }); }; diff --git a/awx/ui_next/src/screens/Inventory/InventorySource/InventorySource.jsx b/awx/ui_next/src/screens/Inventory/InventorySource/InventorySource.jsx index c9a7a730ed..ecd065a57b 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySource/InventorySource.jsx +++ b/awx/ui_next/src/screens/Inventory/InventorySource/InventorySource.jsx @@ -19,6 +19,7 @@ import { OrganizationsAPI, } from '../../../api'; import { TabbedCardHeader } from '../../../components/Card'; +import { Schedules } from '../../../components/Schedule'; import CardCloseButton from '../../../components/CardCloseButton'; import ContentError from '../../../components/ContentError'; import ContentLoading from '../../../components/ContentLoading'; @@ -64,6 +65,15 @@ function InventorySource({ i18n, inventory, setBreadcrumb, me }) { } }, [inventory, source, 
setBreadcrumb]); + const loadSchedules = params => + InventorySourcesAPI.readSchedules(source?.id, params); + + const createSchedule = data => + InventorySourcesAPI.createSchedule(source?.id, data); + + const loadScheduleOptions = () => + InventorySourcesAPI.readScheduleOptions(source?.id); + const tabsArray = [ { name: ( @@ -104,7 +114,9 @@ function InventorySource({ i18n, inventory, setBreadcrumb, me }) { return ( <> - {['edit'].some(name => location.pathname.includes(name)) ? null : ( + {['edit', 'schedules/'].some(name => + location.pathname.includes(name) + ) ? null : ( @@ -144,6 +156,20 @@ function InventorySource({ i18n, inventory, setBreadcrumb, me }) { apiModel={InventorySourcesAPI} /> + + + setBreadcrumb(inventory, source, schedule) + } + unifiedJobTemplate={source} + loadSchedules={loadSchedules} + loadScheduleOptions={loadScheduleOptions} + /> + From 4ce37ec849ca37fcc8c01839879cfdb9de1e013f Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Fri, 5 Jun 2020 11:13:01 -0700 Subject: [PATCH 019/494] Bump foreman collection to 0.8.1 * New release includes: 'add host_filters and want_ansible_ssh_host like script used to have' --- requirements/collections_requirements.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/collections_requirements.yml b/requirements/collections_requirements.yml index ea7abf5578..9e9634b7b9 100644 --- a/requirements/collections_requirements.yml +++ b/requirements/collections_requirements.yml @@ -7,7 +7,7 @@ collections: - name: amazon.aws version: 0.1.1 # version 0.1.0 seems to have gone missing - name: theforeman.foreman - version: 0.8.0 + version: 0.8.1 - name: google.cloud version: 0.0.9 # contains PR 167, should be good to go - name: openstack.cloud From 7d0c49c0435463ce053ff12fc2ea20f86e8d4756 Mon Sep 17 00:00:00 2001 From: mosad Date: Fri, 5 Jun 2020 23:41:20 +0200 Subject: [PATCH 020/494] Fix conflict --- installer/roles/kubernetes/templates/deployment.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index 243b235c91..12ab4c1411 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -159,7 +159,7 @@ spec: privileged: {{ web_security_context_privileged }} {% endif %} {% endif %} - image: "{{ kubernetes_web_image }}:{{ kubernetes_web_version }}" + image: "{{ kubernetes_awx_image }}:{{ kubernetes_web_version }}" imagePullPolicy: Always ports: - containerPort: 8052 From 9ae344b772635122b926094699b3bb013762388f Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Tue, 19 May 2020 10:24:24 -0700 Subject: [PATCH 021/494] foreman: use group_prefix for all groups * awx's "compatibility layer" for the foreman plugin had the group_prefix hard-coded to 'foreman_' --- awx/main/models/inventory.py | 10 +++++----- .../inventory/plugins/satellite6/files/foreman.yml | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 0bf553d385..c03cbcd987 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -2621,21 +2621,21 @@ class satellite6(PluginFileInjector): # Compatibility content group_by_hostvar = { - "environment": {"prefix": "foreman_environment_", + "environment": {"prefix": "{}environment_".format(group_prefix), "separator": "", "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " 
"regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')"}, - "location": {"prefix": "foreman_location_", + "location": {"prefix": "{}location_".format(group_prefix), "separator": "", "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, - "organization": {"prefix": "foreman_organization_", + "organization": {"prefix": "{}organization_".format(group_prefix), "separator": "", "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, - "lifecycle_environment": {"prefix": "foreman_lifecycle_environment_", + "lifecycle_environment": {"prefix": "{}lifecycle_environment_".format(group_prefix), "separator": "", "key": "foreman['content_facet_attributes']['lifecycle_environment_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}, - "content_view": {"prefix": "foreman_content_view_", + "content_view": {"prefix": "{}content_view_".format(group_prefix), "separator": "", "key": "foreman['content_facet_attributes']['content_view_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"} diff --git a/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml index 11d6f67220..782bb89be7 100644 --- a/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml +++ b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml @@ -4,19 +4,19 @@ compose: group_prefix: foo_group_prefix keyed_groups: - key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '') - prefix: foreman_environment_ + prefix: foo_group_prefixenvironment_ separator: '' - key: foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') - prefix: foreman_location_ + prefix: foo_group_prefixlocation_ separator: '' - key: foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') - prefix: foreman_organization_ + prefix: foo_group_prefixorganization_ separator: '' - key: foreman['content_facet_attributes']['lifecycle_environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') - prefix: foreman_lifecycle_environment_ + prefix: foo_group_prefixlifecycle_environment_ separator: '' - key: foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') - prefix: foreman_content_view_ + prefix: foo_group_prefixcontent_view_ separator: '' - key: '"%s-%s-%s" | format(app, tier, color)' separator: '' From 1e6437b7739d51ef8f228be9c51457acfc9e12c2 Mon Sep 17 00:00:00 2001 From: mosad Date: Sat, 6 Jun 2020 00:17:26 +0200 Subject: [PATCH 022/494] Resolve conflict 2 --- .../roles/kubernetes/templates/deployment.yml.j2 | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index 12ab4c1411..7ab8a2804c 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -214,12 +214,12 @@ spec: readOnly: true - name: {{ kubernetes_deployment_name }}-supervisor-web-config - mountPath: "/supervisor.conf" + mountPath: "/etc/supervisord.conf" subPath: supervisor.conf readOnly: true - name: {{ kubernetes_deployment_name }}-supervisor-task-config - mountPath: 
"/supervisor_task.conf" + mountPath: "/etc/supervisord_task.conf" subPath: supervisor_task.conf readOnly: true @@ -294,12 +294,12 @@ spec: readOnly: true - name: {{ kubernetes_deployment_name }}-supervisor-web-config - mountPath: "/supervisor.conf" + mountPath: "/etc/supervisord.conf" subPath: supervisor.conf readOnly: true - name: {{ kubernetes_deployment_name }}-supervisor-task-config - mountPath: "/supervisor_task.conf" + mountPath: "/etc/supervisord_task.conf" subPath: supervisor_task.conf readOnly: true @@ -315,7 +315,7 @@ spec: mountPath: "/var/run/memcached" env: - name: SUPERVISOR_WEB_CONFIG_PATH - value: "/supervisor.conf" + value: "/etc/supervisord.conf" - name: AWX_SKIP_MIGRATIONS value: "1" - name: MY_POD_UID @@ -591,4 +591,4 @@ spec: name: {{ kubernetes_deployment_name }}-web-svc weight: 100 wildcardPolicy: None -{% endif %} +{% endif %} \ No newline at end of file From 9c20b9412af61094d37a6e2d0cb6c9fc7a5d2ad3 Mon Sep 17 00:00:00 2001 From: chris meyers Date: Wed, 13 May 2020 08:30:39 -0400 Subject: [PATCH 023/494] delete and re-add host when ip address changes * The websocket backplane interconnect is done via ip address for Kubernetes and OpenShift. On init run_wsbroadcast reads all Instances from the DB and makes a decision to use the ip address or the hostname based, with preference given to the ip address if defined. For Kubernetes and OpenShift the nodes can load the Instance before the ip_address is set. This would cause the connection to be tried by hostname rather than ip address. This changeset ensures that an ip address set after an Instance record is created will be detected and used. --- awx/main/wsbroadcast.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index 243981a885..05ab30602f 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -162,6 +162,13 @@ class BroadcastWebsocketManager(object): deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts) + remote_addresses = {k: v.remote_host for k, v in self.broadcast_tasks.items()} + for hostname, address in known_hosts.items(): + if hostname in self.broadcast_tasks and \ + address != remote_addresses[hostname]: + deleted_remote_hosts.add(hostname) + new_remote_hosts.add(hostname) + if deleted_remote_hosts: logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list") if new_remote_hosts: From 2f7ba75ae4c4a054dde1cae098fc609aae9b6946 Mon Sep 17 00:00:00 2001 From: chris meyers Date: Tue, 12 May 2020 13:23:08 -0400 Subject: [PATCH 024/494] track stats by hostname not remote host/ip * broadcast websockets have stats tracked (i.e. connection status, number of messages total, messages per minute, etc). Previous to this change, stats were tracked by ip address, if it was defined on the instance, XOR hostname. This changeset tracks stats by hostname. 
--- awx/main/wsbroadcast.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index 05ab30602f..a97baf45f4 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -37,7 +37,7 @@ def get_broadcast_hosts(): .order_by('hostname') \ .values('hostname', 'ip_address') \ .distinct() - return [i['ip_address'] or i['hostname'] for i in instances] + return {i['hostname']: i['ip_address'] or i['hostname'] for i in instances} def get_local_host(): @@ -149,15 +149,22 @@ class BroadcastWebsocketTask(WebsocketTask): class BroadcastWebsocketManager(object): def __init__(self): self.event_loop = asyncio.get_event_loop() + ''' + { + 'hostname1': BroadcastWebsocketTask(), + 'hostname2': BroadcastWebsocketTask(), + 'hostname3': BroadcastWebsocketTask(), + } + ''' self.broadcast_tasks = dict() - # parallel dict to broadcast_tasks that tracks stats self.local_hostname = get_local_host() self.stats_mgr = BroadcastWebsocketStatsManager(self.event_loop, self.local_hostname) async def run_per_host_websocket(self): while True: - future_remote_hosts = get_broadcast_hosts() + known_hosts = get_broadcast_hosts() + future_remote_hosts = known_hosts.keys() current_remote_hosts = self.broadcast_tasks.keys() deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts) @@ -184,7 +191,7 @@ class BroadcastWebsocketManager(object): broadcast_task = BroadcastWebsocketTask(name=self.local_hostname, event_loop=self.event_loop, stats=stats, - remote_host=h) + remote_host=known_hosts[h]) broadcast_task.start() self.broadcast_tasks[h] = broadcast_task From e768d5e7fc9be142dee8a31d942b4711d4a093f8 Mon Sep 17 00:00:00 2001 From: Seth Foster Date: Mon, 8 Jun 2020 17:51:15 -0400 Subject: [PATCH 025/494] Make all_parents_must_converge settable when creating node When targeting, ../workflow_job_templates/id#/workflow_nodes/ endpoint, user could not set all_parents_must_converge to true. 
awx issue #7063 --- awx/api/serializers.py | 2 +- awx/main/tests/functional/api/test_workflow_node.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 75531f6e8a..15fb77f848 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3600,7 +3600,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): ujt = self.instance.unified_job_template if ujt is None: ret = {} - for fd in ('workflow_job_template', 'identifier'): + for fd in ('workflow_job_template', 'identifier', 'all_parents_must_converge'): if fd in attrs: ret[fd] = attrs[fd] return ret diff --git a/awx/main/tests/functional/api/test_workflow_node.py b/awx/main/tests/functional/api/test_workflow_node.py index 64c22898df..ec70716f94 100644 --- a/awx/main/tests/functional/api/test_workflow_node.py +++ b/awx/main/tests/functional/api/test_workflow_node.py @@ -71,6 +71,18 @@ def test_node_accepts_prompted_fields(inventory, project, workflow_job_template, user=admin_user, expect=201) +@pytest.mark.django_db +@pytest.mark.parametrize("field_name, field_value", [ + ('all_parents_must_converge', True), + ('all_parents_must_converge', False), +]) +def test_create_node_with_field(field_name, field_value, workflow_job_template, post, admin_user): + url = reverse('api:workflow_job_template_workflow_nodes_list', + kwargs={'pk': workflow_job_template.pk}) + res = post(url, {field_name: field_value}, user=admin_user, expect=201) + assert res.data[field_name] == field_value + + @pytest.mark.django_db class TestApprovalNodes(): def test_approval_node_creation(self, post, approval_node, admin_user): From 71cc359ccf224733ad1d09f0f22269d7040b03ce Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Mon, 8 Jun 2020 18:07:33 -0400 Subject: [PATCH 026/494] don't block on log aggregator socket.send() calls see: https://github.com/ansible/tower/issues/4391 --- awx/main/utils/handlers.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index ae0e83a9c5..c5e0014f8e 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -13,6 +13,10 @@ class RSysLogHandler(logging.handlers.SysLogHandler): append_nul = False + def _connect_unixsocket(self, address): + super(RSysLogHandler, self)._connect_unixsocket(address) + self.socket.setblocking(False) + def emit(self, msg): if not settings.LOG_AGGREGATOR_ENABLED: return @@ -26,6 +30,14 @@ class RSysLogHandler(logging.handlers.SysLogHandler): # unfortunately, we can't log that because...rsyslogd is down (and # would just us back ddown this code path) pass + except BlockingIOError: + # for , rsyslogd is no longer reading from the domain socket, and + # we're unable to write any more to it without blocking (we've seen this behavior + # from time to time when logging is totally misconfigured; + # in this scenario, it also makes more sense to just drop the messages, + # because the alternative is blocking the socket.send() in the + # Python process, which we definitely don't want to do) + pass ColorHandler = logging.StreamHandler From d2bbe7aa1acd6e97138ec736bcde5cfadcf151ae Mon Sep 17 00:00:00 2001 From: Rebeccah Date: Wed, 3 Jun 2020 14:18:19 -0400 Subject: [PATCH 027/494] remove memcache from everywhere and add djagno-redis to cover it --- CONTRIBUTING.md | 1 - awx/settings/defaults.py | 4 +-- docs/clustering.md | 1 - installer/roles/kubernetes/defaults/main.yml | 6 ---- .../kubernetes/templates/deployment.yml.j2 | 34 ------------------- 
.../roles/local_docker/defaults/main.yml | 3 -- .../roles/local_docker/tasks/compose.yml | 6 ---- .../templates/docker-compose.yml.j2 | 18 +--------- requirements/requirements.in | 2 +- requirements/requirements.txt | 2 +- tools/docker-compose-cluster.yml | 11 +----- tools/docker-compose.yml | 10 +----- 12 files changed, 7 insertions(+), 91 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a1aeffb1ba..dc15b0f0f1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -157,7 +157,6 @@ If you start a second terminal session, you can take a look at the running conta $ docker ps CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 44251b476f98 gcr.io/ansible-tower-engineering/awx_devel:devel "/entrypoint.sh /bin…" 27 seconds ago Up 23 seconds 0.0.0.0:6899->6899/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 0.0.0.0:8080->8080/tcp, 22/tcp, 0.0.0.0:8888->8888/tcp tools_awx_run_9e820694d57e -b049a43817b4 memcached:alpine "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:11211->11211/tcp tools_memcached_1 40de380e3c2e redis:latest "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:6379->6379/tcp tools_redis_1 b66a506d3007 postgres:10 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1 ``` diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index b300579471..b737a1d0c4 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -441,8 +441,8 @@ CELERYBEAT_SCHEDULE = { # Django Caching Configuration CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'unix:/var/run/memcached/memcached.sock' + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': 'unix:/var/run/redis/redis.sock' }, } diff --git a/docs/clustering.md b/docs/clustering.md index 7b0221beea..ecf9a97dee 100644 --- a/docs/clustering.md +++ b/docs/clustering.md @@ -218,7 +218,6 @@ Each Tower instance is made up of several different services working collaborati * **Callback Receiver** - Receives job events that result from running Ansible jobs. * **Celery** - The worker queue that processes and runs all jobs. * **Redis** - this is used as a queue for AWX to process ansible playbook callback events. -* **Memcached** - A local caching service for the instance it lives on. Tower is configured in such a way that if any of these services or their components fail, then all services are restarted. If these fail sufficiently (often in a short span of time), then the entire instance will be placed offline in an automated fashion in order to allow remediation without causing unexpected behavior. 
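The settings change in awx/settings/defaults.py above only swaps the CACHES backend; calling code is unaffected because it goes through Django's cache API. A minimal sketch, assuming a configured Django environment (for example an awx-manage shell); the key and value are made up:

    from django.core.cache import cache

    # These calls behave the same whether CACHES['default'] points at memcached
    # or at django_redis.cache.RedisCache over the unix socket.
    cache.set('example-key', {'answer': 42}, timeout=300)
    print(cache.get('example-key'))   # -> {'answer': 42}
    cache.delete('example-key')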
diff --git a/installer/roles/kubernetes/defaults/main.yml b/installer/roles/kubernetes/defaults/main.yml index d300bd7829..5a15a8929a 100644 --- a/installer/roles/kubernetes/defaults/main.yml +++ b/installer/roles/kubernetes/defaults/main.yml @@ -27,12 +27,6 @@ kubernetes_redis_image: "redis" kubernetes_redis_image_tag: "latest" kubernetes_redis_config_mount_path: "/usr/local/etc/redis/redis.conf" -memcached_mem_request: 1 -memcached_cpu_request: 500 - -kubernetes_memcached_version: "latest" -kubernetes_memcached_image: "memcached" - openshift_pg_emptydir: false openshift_pg_pvc_name: postgresql diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index c915f5c77f..22ce12153a 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -161,9 +161,6 @@ spec: - name: {{ kubernetes_deployment_name }}-redis-socket mountPath: "/var/run/redis" - - name: {{ kubernetes_deployment_name }}-memcached-socket - mountPath: "/var/run/memcached" - resources: requests: memory: "{{ web_mem_request }}Gi" @@ -236,9 +233,6 @@ spec: - name: {{ kubernetes_deployment_name }}-redis-socket mountPath: "/var/run/redis" - - - name: {{ kubernetes_deployment_name }}-memcached-socket - mountPath: "/var/run/memcached" env: - name: SUPERVISOR_WEB_CONFIG_PATH value: "/etc/supervisord.conf" @@ -293,31 +287,6 @@ spec: {% endif %} {% if redis_cpu_limit is defined %} cpu: "{{ redis_cpu_limit }}m" -{% endif %} - - name: {{ kubernetes_deployment_name }}-memcached - image: "{{ kubernetes_memcached_image }}:{{ kubernetes_memcached_version }}" - imagePullPolicy: Always - command: - - 'memcached' - - '-s' - - '/var/run/memcached/memcached.sock' - - '-a' - - '0666' - volumeMounts: - - name: {{ kubernetes_deployment_name }}-memcached-socket - mountPath: "/var/run/memcached" - resources: - requests: - memory: "{{ memcached_mem_request }}Gi" - cpu: "{{ memcached_cpu_request }}m" -{% if memcached_mem_limit is defined or memcached_cpu_limit is defined %} - limits: -{% endif %} -{% if memcached_mem_limit is defined %} - memory: "{{ memcached_mem_limit }}Gi" -{% endif %} -{% if memcached_cpu_limit is defined %} - cpu: "{{ memcached_cpu_limit }}m" {% endif %} {% if tolerations is defined %} tolerations: @@ -424,9 +393,6 @@ spec: - name: {{ kubernetes_deployment_name }}-redis-socket emptyDir: {} - - name: {{ kubernetes_deployment_name }}-memcached-socket - emptyDir: {} - --- apiVersion: v1 kind: Service diff --git a/installer/roles/local_docker/defaults/main.yml b/installer/roles/local_docker/defaults/main.yml index 490e1e8fcf..f8e1304702 100644 --- a/installer/roles/local_docker/defaults/main.yml +++ b/installer/roles/local_docker/defaults/main.yml @@ -7,7 +7,4 @@ redis_image: "redis" postgresql_version: "10" postgresql_image: "postgres:{{postgresql_version}}" -memcached_image: "memcached" -memcached_version: "alpine" - compose_start_containers: true diff --git a/installer/roles/local_docker/tasks/compose.yml b/installer/roles/local_docker/tasks/compose.yml index 3212732283..120b81cc1a 100644 --- a/installer/roles/local_docker/tasks/compose.yml +++ b/installer/roles/local_docker/tasks/compose.yml @@ -10,12 +10,6 @@ state: directory mode: 0777 -- name: Create Memcached socket directory - file: - path: "{{ docker_compose_dir }}/memcached_socket" - state: directory - mode: 0777 - - name: Create Docker Compose Configuration template: src: "{{ item }}.j2" diff --git 
a/installer/roles/local_docker/templates/docker-compose.yml.j2 b/installer/roles/local_docker/templates/docker-compose.yml.j2 index a6cf121593..edfc5e6493 100644 --- a/installer/roles/local_docker/templates/docker-compose.yml.j2 +++ b/installer/roles/local_docker/templates/docker-compose.yml.j2 @@ -7,7 +7,7 @@ services: container_name: awx_web depends_on: - redis - - memcached + - django-redis {% if pg_hostname is not defined %} - postgres {% endif %} @@ -32,7 +32,6 @@ services: - "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py" - "{{ docker_compose_dir }}/nginx.conf:/etc/nginx/nginx.conf:ro" - "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw" - - "{{ docker_compose_dir }}/memcached_socket:/var/run/memcached/:rw" {% if project_data_dir is defined %} - "{{ project_data_dir +':/var/lib/awx/projects:rw' }}" {% endif %} @@ -76,7 +75,6 @@ services: container_name: awx_task depends_on: - redis - - memcached - web {% if pg_hostname is not defined %} - postgres @@ -93,7 +91,6 @@ services: - "{{ docker_compose_dir }}/environment.sh:/etc/tower/conf.d/environment.sh" - "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py" - "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw" - - "{{ docker_compose_dir }}/memcached_socket:/var/run/memcached/:rw" {% if project_data_dir is defined %} - "{{ project_data_dir +':/var/lib/awx/projects:rw' }}" {% endif %} @@ -142,19 +139,6 @@ services: volumes: - "{{ docker_compose_dir }}/redis.conf:/usr/local/etc/redis/redis.conf:ro" - "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw" - - "{{ docker_compose_dir }}/memcached_socket:/var/run/memcached/:rw" - - memcached: - image: "{{ memcached_image }}:{{ memcached_version }}" - container_name: awx_memcached - command: ["-s", "/var/run/memcached/memcached.sock", "-a", "0666"] - restart: unless-stopped - environment: - http_proxy: {{ http_proxy | default('') }} - https_proxy: {{ https_proxy | default('') }} - no_proxy: {{ no_proxy | default('') }} - volumes: - - "{{ docker_compose_dir }}/memcached_socket:/var/run/memcached/:rw" {% if pg_hostname is not defined %} postgres: diff --git a/requirements/requirements.in b/requirements/requirements.in index c847496844..b03e163e3c 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -17,6 +17,7 @@ django-polymorphic django-pglocks django-qsstats-magic django-radius==1.3.3 # FIX auth does not work with later versions +django-redis django-solo django-split-settings django-taggit @@ -33,7 +34,6 @@ prometheus_client psycopg2 pygerduty pyparsing -python-memcached python-radius python3-saml pyyaml>=5.3.1 # minimum version to pull in new pyyaml for CVE-2017-18342 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 00fbee267d..0d364a4922 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -32,6 +32,7 @@ django-oauth-toolkit==1.1.3 # via -r /awx_devel/requirements/requirements.in django-pglocks==1.0.4 # via -r /awx_devel/requirements/requirements.in django-polymorphic==2.1.2 # via -r /awx_devel/requirements/requirements.in django-qsstats-magic==1.1.0 # via -r /awx_devel/requirements/requirements.in +django-redis==4.5.0 django-radius==1.3.3 # via -r /awx_devel/requirements/requirements.in django-solo==1.1.3 # via -r /awx_devel/requirements/requirements.in django-split-settings==1.0.0 # via -r /awx_devel/requirements/requirements.in @@ -93,7 +94,6 @@ pyrsistent==0.15.7 # via jsonschema python-daemon==2.2.4 # via ansible-runner 
python-dateutil==2.8.1 # via adal, kubernetes python-ldap==3.2.0 # via django-auth-ldap -python-memcached==1.59 # via -r /awx_devel/requirements/requirements.in python-radius==1.0 # via -r /awx_devel/requirements/requirements.in python-string-utils==1.0.0 # via openshift python3-openid==3.1.0 # via social-auth-core diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 95f7f5aaa8..9b9bac1fdd 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -30,7 +30,6 @@ services: volumes: - "../:/awx_devel" - "./redis/redis_socket_ha_1:/var/run/redis/" - - "./memcached/:/var/run/memcached" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" ports: - "5899-5999:5899-5999" @@ -50,7 +49,6 @@ services: volumes: - "../:/awx_devel" - "./redis/redis_socket_ha_2:/var/run/redis/" - - "./memcached/:/var/run/memcached" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" ports: - "7899-7999:7899-7999" @@ -70,7 +68,6 @@ services: volumes: - "../:/awx_devel" - "./redis/redis_socket_ha_3:/var/run/redis/" - - "./memcached/:/var/run/memcached" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" ports: - "8899-8999:8899-8999" @@ -107,10 +104,4 @@ services: postgres: image: postgres:10 container_name: tools_postgres_1 - memcached: - user: ${CURRENT_UID} - image: memcached:alpine - container_name: tools_memcached_1 - command: ["memcached", "-s", "/var/run/memcached/memcached.sock", "-a", "0666"] - volumes: - - "./memcached/:/var/run/memcached" + diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index b1e8fb93c7..aadc953f6e 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -23,7 +23,6 @@ services: - "7899-7999:7899-7999" # default port range for sdb-listen links: - postgres - - memcached - redis # - sync # volumes_from: @@ -33,7 +32,7 @@ services: - "../:/awx_devel" - "../awx/projects/:/var/lib/awx/projects/" - "./redis/redis_socket_standalone:/var/run/redis/" - - "./memcached/:/var/run/memcached" + - "./rsyslog/:/var/lib/awx/rsyslog" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" privileged: true tty: true @@ -54,13 +53,6 @@ services: POSTGRES_HOST_AUTH_METHOD: trust volumes: - "awx_db:/var/lib/postgresql/data" - memcached: - user: ${CURRENT_UID} - image: memcached:alpine - container_name: tools_memcached_1 - command: ["memcached", "-s", "/var/run/memcached/memcached.sock", "-a", "0666"] - volumes: - - "./memcached/:/var/run/memcached" redis: image: redis:latest container_name: tools_redis_1 From b09d9cbe41318591d07809561e2b432585c74bc1 Mon Sep 17 00:00:00 2001 From: Rebeccah Hunter Date: Thu, 4 Jun 2020 12:05:03 -0400 Subject: [PATCH 028/494] removed django-redis as a dependency Co-authored-by: Shane McDonald --- installer/roles/local_docker/templates/docker-compose.yml.j2 | 1 - 1 file changed, 1 deletion(-) diff --git a/installer/roles/local_docker/templates/docker-compose.yml.j2 b/installer/roles/local_docker/templates/docker-compose.yml.j2 index edfc5e6493..eaa166a0ab 100644 --- a/installer/roles/local_docker/templates/docker-compose.yml.j2 +++ b/installer/roles/local_docker/templates/docker-compose.yml.j2 @@ -7,7 +7,6 @@ services: container_name: awx_web depends_on: - redis - - django-redis {% if pg_hostname is not defined %} - postgres {% endif %} From 669d4535b1dd11526fabe5806999fbfe859d6b79 Mon Sep 17 00:00:00 2001 From: Rebeccah Date: Thu, 4 Jun 2020 14:38:06 -0400 Subject: [PATCH 029/494] adding isolate db location and ingore for django_redis exceptions --- 
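Two effects of the one-line settings change in the diff below, sketched under assumed conditions (the key name is made up): the ?db=1 suffix points the cache at a redis database of its own, and DJANGO_REDIS_IGNORE_EXCEPTIONS = True lets a cache call degrade to a miss instead of raising when redis is briefly unreachable, roughly matching how the old memcached backend behaved.

    from django.core.cache import cache

    # redis reachable:    the cached value, or 'fallback' on a genuine miss
    # redis unreachable:  with DJANGO_REDIS_IGNORE_EXCEPTIONS = True the connection
    #                     error is swallowed and 'fallback' is returned instead
    value = cache.get('example-key', default='fallback')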
awx/settings/defaults.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index b737a1d0c4..8df6d4f440 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -439,10 +439,11 @@ CELERYBEAT_SCHEDULE = { } # Django Caching Configuration +DJANGO_REDIS_IGNORE_EXCEPTIONS = True CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'unix:/var/run/redis/redis.sock' + 'LOCATION': 'unix:/var/run/redis/redis.sock?db=1' }, } From 02cf4585f845fe41b0cf5bfc8450631000bf34e2 Mon Sep 17 00:00:00 2001 From: Rebeccah Date: Thu, 4 Jun 2020 18:15:54 -0400 Subject: [PATCH 030/494] remove memcache license file --- docs/licenses/python-memcached.txt | 556 ----------------------------- 1 file changed, 556 deletions(-) delete mode 100644 docs/licenses/python-memcached.txt diff --git a/docs/licenses/python-memcached.txt b/docs/licenses/python-memcached.txt deleted file mode 100644 index 89b9a159ca..0000000000 --- a/docs/licenses/python-memcached.txt +++ /dev/null @@ -1,556 +0,0 @@ -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python -alone or in any derivative version, provided, however, that PSF's -License Agreement and PSF's notice of copyright, i.e., "Copyright (c) -2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative -version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. 
- - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. 
Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. 
- -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -This copy of Python includes a copy of bzip2, which is licensed under the following terms: - - -This program, "bzip2", the associated library "libbzip2", and all -documentation, are copyright (C) 1996-2005 Julian R Seward. All -rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. The origin of this software must not be misrepresented; you must - not claim that you wrote the original software. If you use this - software in a product, an acknowledgment in the product - documentation would be appreciated but is not required. - -3. Altered source versions must be plainly marked as such, and must - not be misrepresented as being the original software. - -4. The name of the author may not be used to endorse or promote - products derived from this software without specific prior written - permission. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS -OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE -GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Julian Seward, Cambridge, UK. -jseward@acm.org -bzip2/libbzip2 version 1.0.3 of 15 February 2005 - - -This copy of Python includes a copy of db, which is licensed under the following terms: - -/*- - * $Id: LICENSE,v 12.1 2005/06/16 20:20:10 bostic Exp $ - */ - -The following is the license that applies to this copy of the Berkeley DB -software. For a license to use the Berkeley DB software under conditions -other than those described here, or to purchase support for this software, -please contact Sleepycat Software by email at info@sleepycat.com, or on -the Web at http://www.sleepycat.com. - -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= -/* - * Copyright (c) 1990-2005 - * Sleepycat Software. All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Redistributions in any form must be accompanied by information on - * how to obtain complete source code for the DB software and any - * accompanying software that uses the DB software. The source code - * must either be included in the distribution or be available for no - * more than the cost of distribution plus a nominal fee, and must be - * freely redistributable under reasonable conditions. For an - * executable file, complete source code means the source code for all - * modules it contains. It does not include source code for modules or - * files that typically accompany the major components of the operating - * system on which the executable file runs. - * - * THIS SOFTWARE IS PROVIDED BY SLEEPYCAT SOFTWARE ``AS IS'' AND ANY EXPRESS - * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR - * NON-INFRINGEMENT, ARE DISCLAIMED. IN NO EVENT SHALL SLEEPYCAT SOFTWARE - * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF - * THE POSSIBILITY OF SUCH DAMAGE. - */ -/* - * Copyright (c) 1990, 1993, 1994, 1995 - * The Regents of the University of California. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the University nor the names of its contributors - * may be used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - */ -/* - * Copyright (c) 1995, 1996 - * The President and Fellows of Harvard University. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the University nor the names of its contributors - * may be used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY HARVARD AND ITS CONTRIBUTORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL HARVARD OR ITS CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - */ - -This copy of Python includes a copy of openssl, which is licensed under the following terms: - - - LICENSE ISSUES - ============== - - The OpenSSL toolkit stays under a dual license, i.e. both the conditions of - the OpenSSL License and the original SSLeay license apply to the toolkit. - See below for the actual license texts. Actually both licenses are BSD-style - Open Source licenses. In case of any license issues related to OpenSSL - please contact openssl-core@openssl.org. - - OpenSSL License - --------------- - -/* ==================================================================== - * Copyright (c) 1998-2005 The OpenSSL Project. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * - * 3. 
All advertising materials mentioning features or use of this - * software must display the following acknowledgment: - * "This product includes software developed by the OpenSSL Project - * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" - * - * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to - * endorse or promote products derived from this software without - * prior written permission. For written permission, please contact - * openssl-core@openssl.org. - * - * 5. Products derived from this software may not be called "OpenSSL" - * nor may "OpenSSL" appear in their names without prior written - * permission of the OpenSSL Project. - * - * 6. Redistributions of any form whatsoever must retain the following - * acknowledgment: - * "This product includes software developed by the OpenSSL Project - * for use in the OpenSSL Toolkit (http://www.openssl.org/)" - * - * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY - * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR - * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - * OF THE POSSIBILITY OF SUCH DAMAGE. - * ==================================================================== - * - * This product includes cryptographic software written by Eric Young - * (eay@cryptsoft.com). This product includes software written by Tim - * Hudson (tjh@cryptsoft.com). - * - */ - - Original SSLeay License - ----------------------- - -/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) - * All rights reserved. - * - * This package is an SSL implementation written - * by Eric Young (eay@cryptsoft.com). - * The implementation was written so as to conform with Netscapes SSL. - * - * This library is free for commercial and non-commercial use as long as - * the following conditions are aheared to. The following conditions - * apply to all code found in this distribution, be it the RC4, RSA, - * lhash, DES, etc., code; not just the SSL code. The SSL documentation - * included with this distribution is covered by the same copyright terms - * except that the holder is Tim Hudson (tjh@cryptsoft.com). - * - * Copyright remains Eric Young's, and as such any Copyright notices in - * the code are not to be removed. - * If this package is used in a product, Eric Young should be given attribution - * as the author of the parts of the library used. - * This can be in the form of a textual message at program startup or - * in documentation (online or textual) provided with the package. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the copyright - * notice, this list of conditions and the following disclaimer. - * 2. 
Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. All advertising materials mentioning features or use of this software - * must display the following acknowledgement: - * "This product includes cryptographic software written by - * Eric Young (eay@cryptsoft.com)" - * The word 'cryptographic' can be left out if the rouines from the library - * being used are not cryptographic related :-). - * 4. If you include any Windows specific code (or a derivative thereof) from - * the apps directory (application code) you must include an acknowledgement: - * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" - * - * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * The licence and distribution terms for any publically available version or - * derivative of this code cannot be changed. i.e. this code cannot simply be - * copied and put under another distribution licence - * [including the GNU Public Licence.] - */ - - -This copy of Python includes a copy of tcl, which is licensed under the following terms: - -This software is copyrighted by the Regents of the University of -California, Sun Microsystems, Inc., Scriptics Corporation, ActiveState -Corporation and other parties. The following terms apply to all files -associated with the software unless explicitly disclaimed in -individual files. - -The authors hereby grant permission to use, copy, modify, distribute, -and license this software and its documentation for any purpose, provided -that existing copyright notices are retained in all copies and that this -notice is included verbatim in any distributions. No written agreement, -license, or royalty fee is required for any of the authorized uses. -Modifications to this software may be copyrighted by their authors -and need not follow the licensing terms described here, provided that -the new terms are clearly indicated on the first page of each file where -they apply. - -IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY -FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES -ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY -DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. - -THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES, -INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE -IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE -NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR -MODIFICATIONS. 
- -GOVERNMENT USE: If you are acquiring this software on behalf of the -U.S. government, the Government shall have only "Restricted Rights" -in the software and related documentation as defined in the Federal -Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2). If you -are acquiring the software on behalf of the Department of Defense, the -software shall be classified as "Commercial Computer Software" and the -Government shall have only "Restricted Rights" as defined in Clause -252.227-7013 (c) (1) of DFARs. Notwithstanding the foregoing, the -authors grant the U.S. Government and others acting in its behalf -permission to use and distribute the software in accordance with the -terms specified in this license. - -This copy of Python includes a copy of tk, which is licensed under the following terms: - -This software is copyrighted by the Regents of the University of -California, Sun Microsystems, Inc., and other parties. The following -terms apply to all files associated with the software unless explicitly -disclaimed in individual files. - -The authors hereby grant permission to use, copy, modify, distribute, -and license this software and its documentation for any purpose, provided -that existing copyright notices are retained in all copies and that this -notice is included verbatim in any distributions. No written agreement, -license, or royalty fee is required for any of the authorized uses. -Modifications to this software may be copyrighted by their authors -and need not follow the licensing terms described here, provided that -the new terms are clearly indicated on the first page of each file where -they apply. - -IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY -FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES -ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY -DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. - -THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES, -INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE -IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE -NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR -MODIFICATIONS. - -GOVERNMENT USE: If you are acquiring this software on behalf of the -U.S. government, the Government shall have only "Restricted Rights" -in the software and related documentation as defined in the Federal -Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2). If you -are acquiring the software on behalf of the Department of Defense, the -software shall be classified as "Commercial Computer Software" and the -Government shall have only "Restricted Rights" as defined in Clause -252.227-7013 (c) (1) of DFARs. Notwithstanding the foregoing, the -authors grant the U.S. Government and others acting in its behalf -permission to use and distribute the software in accordance with the -terms specified in this license. 
From 60800d6740e068a3104a6eb1f99c621b7a2a050c Mon Sep 17 00:00:00 2001 From: Rebeccah Date: Mon, 8 Jun 2020 17:12:59 -0400 Subject: [PATCH 031/494] add license file for django-redis --- docs/licenses/django-redis.txt | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 docs/licenses/django-redis.txt diff --git a/docs/licenses/django-redis.txt b/docs/licenses/django-redis.txt new file mode 100644 index 0000000000..5e1ae723df --- /dev/null +++ b/docs/licenses/django-redis.txt @@ -0,0 +1,26 @@ +Copyright (c) 2011-2016 Andrey Antukh +Copyright (c) 2011 Sean Bleier + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file From e9e410f4f89efe0d99d9520c37a556e69e946001 Mon Sep 17 00:00:00 2001 From: Florian Apolloner Date: Mon, 8 Jun 2020 16:30:52 +0200 Subject: [PATCH 032/494] Send content-type with mattermost notifications, fixes #7264 --- awx/main/notifications/mattermost_backend.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index 7a759d41a3..78a23c72d1 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -3,7 +3,6 @@ import logging import requests -import json from django.utils.encoding import smart_text from django.utils.translation import ugettext_lazy as _ @@ -45,7 +44,7 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase): payload['text'] = m.subject r = requests.post("{}".format(m.recipients()[0]), - data=json.dumps(payload), verify=(not self.mattermost_no_verify_ssl)) + json=payload, verify=(not self.mattermost_no_verify_ssl)) if r.status_code >= 400: logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.text))) if not self.fail_silently: From 0e5f68ef53ea95c975e872683234948a783cbd7a Mon Sep 17 00:00:00 2001 From: Seth Foster Date: Tue, 9 Jun 2020 11:07:22 -0400 Subject: [PATCH 033/494] Make all_parents_must_converge settable when creating node When targeting, ../workflow_job_templates/id#/workflow_nodes/ endpoint, user could not set all_parents_must_converge to true. 
3.7.1 backport for awx issue #7063 --- awx/api/serializers.py | 2 +- awx/main/tests/functional/api/test_workflow_node.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d8152adb34..20239dd38a 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3612,7 +3612,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): ujt = self.instance.unified_job_template if ujt is None: ret = {} - for fd in ('workflow_job_template', 'identifier'): + for fd in ('workflow_job_template', 'identifier', 'all_parents_must_converge'): if fd in attrs: ret[fd] = attrs[fd] return ret diff --git a/awx/main/tests/functional/api/test_workflow_node.py b/awx/main/tests/functional/api/test_workflow_node.py index 64c22898df..ec70716f94 100644 --- a/awx/main/tests/functional/api/test_workflow_node.py +++ b/awx/main/tests/functional/api/test_workflow_node.py @@ -71,6 +71,18 @@ def test_node_accepts_prompted_fields(inventory, project, workflow_job_template, user=admin_user, expect=201) +@pytest.mark.django_db +@pytest.mark.parametrize("field_name, field_value", [ + ('all_parents_must_converge', True), + ('all_parents_must_converge', False), +]) +def test_create_node_with_field(field_name, field_value, workflow_job_template, post, admin_user): + url = reverse('api:workflow_job_template_workflow_nodes_list', + kwargs={'pk': workflow_job_template.pk}) + res = post(url, {field_name: field_value}, user=admin_user, expect=201) + assert res.data[field_name] == field_value + + @pytest.mark.django_db class TestApprovalNodes(): def test_approval_node_creation(self, post, approval_node, admin_user): From d7f9e66710c9b1eaa71791f8035474da3c666ab4 Mon Sep 17 00:00:00 2001 From: Rebeccah Date: Mon, 8 Jun 2020 17:54:58 -0400 Subject: [PATCH 034/494] added changelog entry --- CHANGELOG.md | 1 + tools/docker-compose-cluster.yml | 1 - tools/docker-compose.yml | 1 - 3 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64e65baca3..4bd55f4f58 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/`. ## 12.0.0 (TBD) +- Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240) - Moved to a single container image build instead of separate awx_web and awx_task images. The container image is just `awx` (https://github.com/ansible/awx/pull/7228) - Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017) - Removed support for HipChat notifications ([EoL announcement](https://www.atlassian.com/partnerships/slack/faq#faq-98b17ca3-247f-423b-9a78-70a91681eff0)); all previously-created HipChat notification templates will be deleted due to this removal. 
diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 9b9bac1fdd..ffd8cff101 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -104,4 +104,3 @@ services: postgres: image: postgres:10 container_name: tools_postgres_1 - diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index aadc953f6e..a7e174aced 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -32,7 +32,6 @@ services: - "../:/awx_devel" - "../awx/projects/:/var/lib/awx/projects/" - "./redis/redis_socket_standalone:/var/run/redis/" - - "./rsyslog/:/var/lib/awx/rsyslog" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" privileged: true tty: true From fa1294922b56d9bf4c80e96166afec3de8bd8dbe Mon Sep 17 00:00:00 2001 From: nixocio Date: Tue, 12 May 2020 16:27:30 -0400 Subject: [PATCH 035/494] Add support Prompt on Launch for Workflow Job Template Add support Prompt on Launch for Workflow Job Template see: https://github.com/ansible/awx/issues/5819 --- .../WorkflowJobTemplateEdit.jsx | 2 +- .../shared/WorkflowJobTemplateForm.jsx | 105 ++++++++++++------ .../shared/WorkflowJobTemplateForm.test.jsx | 31 ++++-- 3 files changed, 95 insertions(+), 43 deletions(-) diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx index 81478eb9a4..be61a0ee43 100644 --- a/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx @@ -19,7 +19,7 @@ function WorkflowJobTemplateEdit({ template }) { webhook_key, ...templatePayload } = values; - templatePayload.inventory = inventory?.id; + templatePayload.inventory = inventory?.id || null; templatePayload.organization = organization?.id; templatePayload.webhook_credential = webhook_credential?.id || null; diff --git a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx index 50aca155b6..8306b849cf 100644 --- a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx +++ b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx @@ -1,13 +1,13 @@ import React, { useState } from 'react'; import { t } from '@lingui/macro'; - import PropTypes, { shape } from 'prop-types'; import { withI18n } from '@lingui/react'; import { useField, withFormik } from 'formik'; -import { Form, FormGroup, Checkbox } from '@patternfly/react-core'; +import { Form, FormGroup, Checkbox, TextInput } from '@patternfly/react-core'; import { required } from '../../../util/validators'; +import FieldWithPrompt from '../../../components/FieldWithPrompt'; import FormField, { FieldTooltip, FormSubmitError, @@ -36,19 +36,20 @@ function WorkflowJobTemplateForm({ i18n, submitError, }) { - const [hasContentError, setContentError] = useState(null); - - const [organizationField, organizationMeta, organizationHelpers] = useField( - 'organization' + const [enableWebhooks, setEnableWebhooks] = useState( + Boolean(template.webhook_service) ); + const [hasContentError, setContentError] = useState(null); + const [askInventoryOnLaunchField] = useField('ask_inventory_on_launch'); const [inventoryField, inventoryMeta, inventoryHelpers] = useField( 'inventory' ); const [labelsField, , labelsHelpers] = useField('labels'); - - const [enableWebhooks, setEnableWebhooks] = useState( - 
Boolean(template.webhook_service) + const [limitField, limitMeta, limitHelpers] = useField('limit'); + const [organizationField, organizationMeta, organizationHelpers] = useField( + 'organization' ); + const [scmField, , scmHelpers] = useField('scm_branch'); if (hasContentError) { return ; @@ -79,39 +80,74 @@ function WorkflowJobTemplateForm({ value={organizationField.value} isValid={!organizationMeta.error} /> - - + + inventoryHelpers.setTouched()} onChange={value => { - inventoryHelpers.setValue(value || null); + inventoryHelpers.setValue(value); + }} + required={askInventoryOnLaunchField.value} + touched={inventoryMeta.touched} + error={inventoryMeta.error} + /> + {(inventoryMeta.touched || askInventoryOnLaunchField.value) && + inventoryMeta.error && ( +
+ {inventoryMeta.error} +
+ )} +
+ + + { + limitHelpers.setValue(value); }} /> -
- - + + + > + { + scmHelpers.setValue(value); + }} + /> + @@ -133,6 +169,7 @@ function WorkflowJobTemplateForm({ id="wfjt-variables" name="extra_vars" label={i18n._(t`Variables`)} + promptId="template-ask-variables-on-launch" tooltip={i18n._( t`Pass extra command line variables to the playbook. This is the -e or --extra-vars command line parameter for ansible-playbook. Provide key/value pairs using either YAML or JSON. Refer to the Ansible Tower documentation for example syntax.` )} diff --git a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.test.jsx b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.test.jsx index 4cddaec8e7..f572c320c4 100644 --- a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.test.jsx +++ b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.test.jsx @@ -114,9 +114,9 @@ describe('', () => { 'FormField[name="name"]', 'FormField[name="description"]', 'FormGroup[label="Organization"]', - 'FormGroup[label="Inventory"]', - 'FormField[name="limit"]', - 'FormField[name="scm_branch"]', + 'FieldWithPrompt[label="Inventory"]', + 'FieldWithPrompt[label="Limit"]', + 'FieldWithPrompt[label="Source control branch"]', 'FormGroup[label="Labels"]', 'VariablesField', ]; @@ -137,11 +137,6 @@ describe('', () => { element: 'wfjt-description', value: { value: 'new bar', name: 'description' }, }, - { element: 'wfjt-limit', value: { value: 1234567890, name: 'limit' } }, - { - element: 'wfjt-scm_branch', - value: { value: 'new branch', name: 'scm_branch' }, - }, ]; const changeInputs = async ({ element, value }) => { wrapper.find(`input#${element}`).simulate('change', { @@ -177,6 +172,26 @@ describe('', () => { inputsToChange.map(input => assertChanges(input)); }); + test('test changes in FieldWithPrompt', async () => { + await act(async () => { + wrapper.find('TextInputBase#text-wfjt-scm-branch').prop('onChange')( + 'main' + ); + wrapper.find('TextInputBase#text-wfjt-limit').prop('onChange')( + 1234567890 + ); + }); + + wrapper.update(); + + expect(wrapper.find('input#text-wfjt-scm-branch').prop('value')).toEqual( + 'main' + ); + expect(wrapper.find('input#text-wfjt-limit').prop('value')).toEqual( + 1234567890 + ); + }); + test('webhooks and enable concurrent jobs functions properly', async () => { act(() => { wrapper.find('Checkbox[aria-label="Enable Webhook"]').invoke('onChange')( From 2bbbb04499629ef1be269116a525ed31d229bf60 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 9 Jun 2020 10:19:53 -0400 Subject: [PATCH 036/494] Bump version to 12.0.0 --- CHANGELOG.md | 2 +- VERSION | 2 +- awxkit/VERSION | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4bd55f4f58..5d248c8173 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/`. -## 12.0.0 (TBD) +## 12.0.0 (Jun 9, 2020) - Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240) - Moved to a single container image build instead of separate awx_web and awx_task images. 
The container image is just `awx` (https://github.com/ansible/awx/pull/7228) - Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017) diff --git a/VERSION b/VERSION index b85c6c7b03..4044f90867 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -11.2.0 +12.0.0 diff --git a/awxkit/VERSION b/awxkit/VERSION index b85c6c7b03..4044f90867 100644 --- a/awxkit/VERSION +++ b/awxkit/VERSION @@ -1 +1 @@ -11.2.0 +12.0.0 From 0ad78874ce36234a964b7971770811229936652e Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 9 Jun 2020 12:29:33 -0400 Subject: [PATCH 037/494] remove TCP ports for redis (it only listens on a unix domain socket) --- CONTRIBUTING.md | 2 +- tools/docker-compose-cluster.yml | 6 ------ tools/docker-compose.yml | 2 -- 3 files changed, 1 insertion(+), 9 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index dc15b0f0f1..42516b9a15 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -157,7 +157,7 @@ If you start a second terminal session, you can take a look at the running conta $ docker ps CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 44251b476f98 gcr.io/ansible-tower-engineering/awx_devel:devel "/entrypoint.sh /bin…" 27 seconds ago Up 23 seconds 0.0.0.0:6899->6899/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 0.0.0.0:8080->8080/tcp, 22/tcp, 0.0.0.0:8888->8888/tcp tools_awx_run_9e820694d57e -40de380e3c2e redis:latest "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:6379->6379/tcp tools_redis_1 +40de380e3c2e redis:latest "docker-entrypoint.s…" 28 seconds ago Up 26 seconds b66a506d3007 postgres:10 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1 ``` **NOTE** diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index ffd8cff101..7aec34d8e4 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -79,8 +79,6 @@ services: volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" - "./redis/redis_socket_ha_1:/var/run/redis/" - ports: - - "63791:63791" redis_2: user: ${CURRENT_UID} image: redis:latest @@ -89,8 +87,6 @@ services: volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" - "./redis/redis_socket_ha_2:/var/run/redis/" - ports: - - "63792:63792" redis_3: user: ${CURRENT_UID} image: redis:latest @@ -99,8 +95,6 @@ services: volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" - "./redis/redis_socket_ha_3:/var/run/redis/" - ports: - - "63793:63793" postgres: image: postgres:10 container_name: tools_postgres_1 diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index a7e174aced..9c5808bd41 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -55,8 +55,6 @@ services: redis: image: redis:latest container_name: tools_redis_1 - ports: - - "6379:6379" user: ${CURRENT_UID} volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" From 123346241965f9e496a7de70f893ff38968b29cf Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 9 Jun 2020 12:39:10 -0400 Subject: [PATCH 038/494] add some new changelog entries for 12.0.0 --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d248c8173..d0d4a1b10b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,9 @@ This is a list of high-level changes for each release of AWX. 
A full list of com - Fixed a bug that caused CyberArk AIM credential plugin looks to hang forever in some environments (https://github.com/ansible/awx/issues/6986) - Fixed a bug that caused ANY/ALL converage settings not to properly save when editing approval nodes in the UI (https://github.com/ansible/awx/issues/6998) - Fixed a bug that broke support for the satellite6_group_prefix source variable (https://github.com/ansible/awx/issues/7031) +- Fixed a bug that prevented changes to workflow node convergence settings when approval nodes were in use (https://github.com/ansible/awx/issues/7063) +- Fixed a bug that caused notifications to fail on newer version of Mattermost (https://github.com/ansible/awx/issues/7264) +- Fixed a bug (by upgrading to 0.8.1 of the foreman collection) that prevented host_filters from working properly with Foreman-based inventory (https://github.com/ansible/awx/issues/7225) - Fixed a bug that prevented the usage of the Conjur credential plugin with secrets that contain spaces (https://github.com/ansible/awx/issues/7191) - Fixed a bug in awx-manage run_wsbroadcast --status in kubernetes (https://github.com/ansible/awx/pull/7009) - Fixed a bug that broke notification toggles for system jobs in the UI (https://github.com/ansible/awx/pull/7042) From c1f6fec53216dcf1837ae89f00c6d6746e513790 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 22 Jan 2020 16:09:53 -0500 Subject: [PATCH 039/494] Add skeleton of a custom 'export' resource This only takes a flag for users at the moment, and does nothing in particular with it. --- awxkit/awxkit/cli/resource.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index c70aa5050d..11a5c5e6b8 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -123,6 +123,41 @@ class Config(CustomCommand): } +class Export(CustomCommand): + name = 'export' + help_text = 'export resources from Tower as yaml' + + def extend_parser(self, parser): + resources = parser.add_argument_group('resources') + resources.add_argument('--users', nargs='?', const='') + + def handle(self, client, parser): + self.extend_parser(parser) + + if client.help: + parser.print_help() + raise SystemExit() + + parsed = parser.parse_known_args()[0] + + data = {} + for resource in ('users',): + value = getattr(parsed, resource, None) + if value is None: + print("Pulling no users.") + continue + if value: + print("Pulling users: {}".format(value)) + pass + else: + print("Pulling all users.") + pass + + data[resource] = {} + + return data + + def parse_resource(client, skip_deprecated=False): subparsers = client.parser.add_subparsers( dest='resource', From 4312395a3afdab0604e071e5052db1a93a3f6276 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 27 Jan 2020 16:16:34 -0500 Subject: [PATCH 040/494] Normalize the requested resource into a pk using the pk_or_name helper function. Also, authenticate to the API. 
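For reference, a rough sketch of the idea behind the pk_or_name helper used in this change; the real helper lives in awxkit's CLI options module, and the lookup field and error handling shown here are assumptions for illustration only:

    def pk_or_name_sketch(v2, resource, value, lookup_field='name'):
        """Accept either a primary key or a human-readable name and return a pk."""
        if str(value).isdigit():
            # already a pk, e.g. 'awx export --users 42'
            return int(value)
        # otherwise filter the collection by the lookup field and use the single
        # match's id, e.g. 'awx export --users alice'
        results = getattr(v2, resource).get(**{lookup_field: value}).results
        if len(results) == 1:
            return results[0]['id']
        raise ValueError('could not resolve {!r} to exactly one {}'.format(value, resource))
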
--- awxkit/awxkit/cli/resource.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 11a5c5e6b8..5baecc7ec1 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -131,6 +131,14 @@ class Export(CustomCommand): resources = parser.add_argument_group('resources') resources.add_argument('--users', nargs='?', const='') + def get_resources(self, client, resource, value): + if value: + from .options import pk_or_name + + print("Pulling {}: {}".format(resource, pk_or_name(client.v2, resource, value))) + else: + print("Pulling all {}.".format(resource)) + def handle(self, client, parser): self.extend_parser(parser) @@ -138,20 +146,16 @@ class Export(CustomCommand): parser.print_help() raise SystemExit() + client.authenticate() parsed = parser.parse_known_args()[0] data = {} for resource in ('users',): value = getattr(parsed, resource, None) if value is None: - print("Pulling no users.") + print("Pulling no {}.".format(resource)) continue - if value: - print("Pulling users: {}".format(value)) - pass - else: - print("Pulling all users.") - pass + self.get_resources(client, resource, value) data[resource] = {} From f21d6b1fc4a947807b2ff5d5001e237be415a778 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 28 Jan 2020 11:07:16 -0500 Subject: [PATCH 041/494] Actually query the API for the user or users requested --- awxkit/awxkit/cli/resource.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 5baecc7ec1..5629742511 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -132,12 +132,14 @@ class Export(CustomCommand): resources.add_argument('--users', nargs='?', const='') def get_resources(self, client, resource, value): + api_resource = getattr(client.v2, resource) if value: from .options import pk_or_name - print("Pulling {}: {}".format(resource, pk_or_name(client.v2, resource, value))) + pk = pk_or_name(client.v2, resource, value) + return api_resource.get(id=pk).json['results'] else: - print("Pulling all {}.".format(resource)) + return api_resource.get(all_pages=True).json['results'] def handle(self, client, parser): self.extend_parser(parser) @@ -153,11 +155,10 @@ class Export(CustomCommand): for resource in ('users',): value = getattr(parsed, resource, None) if value is None: - print("Pulling no {}.".format(resource)) continue - self.get_resources(client, resource, value) + resources = self.get_resources(client, resource, value) or [] - data[resource] = {} + data[resource] = resources return data From c1a07ff00b08ad35674e979b3f6f675e1a8f34bf Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 29 Jan 2020 13:21:13 -0500 Subject: [PATCH 042/494] Limit export output to only those fields needed to create the resource --- awxkit/awxkit/cli/resource.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 5629742511..c899126f96 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -133,13 +133,16 @@ class Export(CustomCommand): def get_resources(self, client, resource, value): api_resource = getattr(client.v2, resource) + post_fields = api_resource.options().json['actions']['POST'] if value: from .options import pk_or_name pk = pk_or_name(client.v2, resource, value) - return api_resource.get(id=pk).json['results'] + results = 
api_resource.get(id=pk).json['results'] else: - return api_resource.get(all_pages=True).json['results'] + results = api_resource.get(all_pages=True).json['results'] + + return [{key: r[key] for key in post_fields if key in r} for r in results] def handle(self, client, parser): self.extend_parser(parser) @@ -156,7 +159,7 @@ class Export(CustomCommand): value = getattr(parsed, resource, None) if value is None: continue - resources = self.get_resources(client, resource, value) or [] + resources = self.get_resources(client, resource, value) data[resource] = resources From 9f7fecf8da117bf91e819a6a330711ad3ffcf995 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 29 Jan 2020 14:31:38 -0500 Subject: [PATCH 043/494] Add basic import command --- awxkit/awxkit/cli/resource.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index c899126f96..47b8c83a39 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -1,4 +1,6 @@ +import json import os +import sys from awxkit import api, config from awxkit.utils import to_str @@ -123,9 +125,34 @@ class Config(CustomCommand): } +class Import(CustomCommand): + name = 'import' + help_text = 'import resources into Tower' + + def create_resource(self, client, resource, asset): + api_resource = getattr(client.v2, resource) + if resource == 'users' and 'password' not in asset: + asset['password'] = 'password' + api_resource.post(asset) + + def handle(self, client, parser): + if client.help: + parser.print_help() + raise SystemExit() + + data = json.load(sys.stdin) + client.authenticate() + + for resource, assets in data.items(): + for asset in assets: + self.create_resource(client, resource, asset) + + return {} + + class Export(CustomCommand): name = 'export' - help_text = 'export resources from Tower as yaml' + help_text = 'export resources from Tower' def extend_parser(self, parser): resources = parser.add_argument_group('resources') From 2127f3c96dda39fa591d09231c9f067df080f5f6 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 29 Jan 2020 14:41:39 -0500 Subject: [PATCH 044/494] Add organizations as a resource type to export --- awxkit/awxkit/cli/resource.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 47b8c83a39..107dccf86c 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -157,6 +157,7 @@ class Export(CustomCommand): def extend_parser(self, parser): resources = parser.add_argument_group('resources') resources.add_argument('--users', nargs='?', const='') + resources.add_argument('--organizations', nargs='?', const='') def get_resources(self, client, resource, value): api_resource = getattr(client.v2, resource) @@ -182,7 +183,7 @@ class Export(CustomCommand): parsed = parser.parse_known_args()[0] data = {} - for resource in ('users',): + for resource in ('users', 'organizations'): value = getattr(parsed, resource, None) if value is None: continue From 9280198b0f00d2d43732e7a2205418bac11bed59 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 30 Jan 2020 16:50:16 -0500 Subject: [PATCH 045/494] Use a list of exportable resources to build up the parser Also, handle an implicit export of everything. 
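The three-way flag behavior described in the comment below can be reproduced in isolation with plain argparse; the resource names and values in this sketch are examples only:

    import argparse

    parser = argparse.ArgumentParser()
    for resource in ('users', 'organizations', 'teams'):
        parser.add_argument('--{}'.format(resource), nargs='?', const='')

    # flag omitted           -> attribute is None  (resource not requested)
    # flag with no argument  -> attribute is ''    (export every asset of that type)
    # flag with an argument  -> attribute is the requested name or pk
    print(parser.parse_args([]))                    # Namespace(organizations=None, teams=None, users=None)
    print(parser.parse_args(['--users']))           # Namespace(organizations=None, teams=None, users='')
    print(parser.parse_args(['--users', 'alice']))  # Namespace(organizations=None, teams=None, users='alice')
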
--- awxkit/awxkit/cli/resource.py | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 107dccf86c..1a4af456a7 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -44,6 +44,20 @@ DEPRECATED_RESOURCES_REVERSE = dict( (v, k) for k, v in DEPRECATED_RESOURCES.items() ) +EXPORTABLE_RESOURCES = [ + 'users', + 'organizations', + 'teams', + # 'credential_types', + # 'credentials', + # 'notification_templates', + # 'inventory_scripts', + # 'projects', + # 'inventory', + # 'job_templates', + # 'workflow_job_templates', +] + class CustomCommand(metaclass=CustomRegistryMeta): """Base class for implementing custom commands. @@ -156,12 +170,17 @@ class Export(CustomCommand): def extend_parser(self, parser): resources = parser.add_argument_group('resources') - resources.add_argument('--users', nargs='?', const='') - resources.add_argument('--organizations', nargs='?', const='') + + for resource in EXPORTABLE_RESOURCES: + # This parsing pattern will result in 3 different possible outcomes: + # 1) the resource flag is not used at all, which will result in the attr being None + # 2) the resource flag is used with no argument, which will result in the attr being '' + # 3) the resource flag is used with an argument, and the attr will be that argument's value + resources.add_argument('--{}'.format(resource), nargs='?', const='') def get_resources(self, client, resource, value): api_resource = getattr(client.v2, resource) - post_fields = api_resource.options().json['actions']['POST'] + post_options = api_resource.options().json['actions']['POST'] if value: from .options import pk_or_name @@ -170,7 +189,7 @@ class Export(CustomCommand): else: results = api_resource.get(all_pages=True).json['results'] - return [{key: r[key] for key in post_fields if key in r} for r in results] + return [{key: r[key] for key in post_options if key in r} for r in results] def handle(self, client, parser): self.extend_parser(parser) @@ -182,10 +201,13 @@ class Export(CustomCommand): client.authenticate() parsed = parser.parse_known_args()[0] + # If no resource flags are explicitly used, export everything. 
+ all_resources = all(getattr(parsed, resource, None) is None for resource in EXPORTABLE_RESOURCES) + data = {} - for resource in ('users', 'organizations'): + for resource in EXPORTABLE_RESOURCES: value = getattr(parsed, resource, None) - if value is None: + if value is None and not all_resources: continue resources = self.get_resources(client, resource, value) From 37cbf7691b8d67fb6931088e8a7b5c9b3e195fba Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 3 Feb 2020 11:06:08 -0500 Subject: [PATCH 046/494] Split out separate methods for the OPTIONS call and massaging each asset dict --- awxkit/awxkit/cli/resource.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 1a4af456a7..45415e2378 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -178,18 +178,25 @@ class Export(CustomCommand): # 3) the resource flag is used with an argument, and the attr will be that argument's value resources.add_argument('--{}'.format(resource), nargs='?', const='') - def get_resources(self, client, resource, value): - api_resource = getattr(client.v2, resource) - post_options = api_resource.options().json['actions']['POST'] + def get_resource_options(self, endpoint): + return endpoint.options().json['actions']['POST'] + + def get_assets(self, endpoint, value): if value: from .options import pk_or_name pk = pk_or_name(client.v2, resource, value) - results = api_resource.get(id=pk).json['results'] + results = endpoint.get(id=pk).json['results'] else: - results = api_resource.get(all_pages=True).json['results'] + results = endpoint.get(all_pages=True).json['results'] - return [{key: r[key] for key in post_options if key in r} for r in results] + return results + + def enhance_asset(self, endpoint, asset, options): + fields = {key: asset[key] for key in options if key in asset} + fk_fields = {} + related_fields = {} + return dict(**fields, **fk_fields, **related_fields) def handle(self, client, parser): self.extend_parser(parser) @@ -209,9 +216,11 @@ class Export(CustomCommand): value = getattr(parsed, resource, None) if value is None and not all_resources: continue - resources = self.get_resources(client, resource, value) + endpoint = getattr(client.v2, resource) + options = self.get_resource_options(endpoint) + assets = self.get_assets(endpoint, value) - data[resource] = resources + data[resource] = [self.enhance_asset(endpoint, asset, options) for asset in assets] return data From dfe34563aa1b092e81aafb946d22565f4d061652 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 11 Feb 2020 14:43:30 -0500 Subject: [PATCH 047/494] Resolve the natural keys of assets we've already collected --- awxkit/awxkit/cli/resource.py | 60 ++++++++++++++++++++++++++++++++--- 1 file changed, 56 insertions(+), 4 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 45415e2378..8d08ffdf3f 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -51,13 +51,28 @@ EXPORTABLE_RESOURCES = [ # 'credential_types', # 'credentials', # 'notification_templates', - # 'inventory_scripts', # 'projects', # 'inventory', # 'job_templates', # 'workflow_job_templates', ] +NATURAL_KEYS = { + 'user': ('username',), + 'organization': ('name',), + 'team': ('organization', 'name'), + 'credential_type': ('name', 'kind'), + 'credential': ('organization', 'name', 'credential_type'), + 'notification_template': ('organization', 'name'), + 'project': 
('organization', 'name'), + 'inventory': ('organization', 'name'), + 'job_template': ('name',), + 'workflow_job_template': ('organization', 'name'), + + # related resources + 'role': ('name',), # FIXME: we also need the content_object, itself as a natural key representation +} + class CustomCommand(metaclass=CustomRegistryMeta): """Base class for implementing custom commands. @@ -168,6 +183,10 @@ class Export(CustomCommand): name = 'export' help_text = 'export resources from Tower' + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._natural_keys = {} + def extend_parser(self, parser): resources = parser.add_argument_group('resources') @@ -181,6 +200,16 @@ class Export(CustomCommand): def get_resource_options(self, endpoint): return endpoint.options().json['actions']['POST'] + def register_natural_key(self, asset): + natural_key = {'type': asset['type']} + lookup = NATURAL_KEYS.get(asset['type']) + if callable(lookup): + natural_key.update(lookup(asset)) + else: + natural_key.update((key, asset[key]) for key in lookup or ()) + + self._natural_keys[asset['url']] = natural_key + def get_assets(self, endpoint, value): if value: from .options import pk_or_name @@ -190,12 +219,35 @@ class Export(CustomCommand): else: results = endpoint.get(all_pages=True).json['results'] + for asset in results: + self.register_natural_key(asset) + return results + def get_natural_key(self, asset_url): + if asset_url not in self._natural_keys: + # FIXME: + # get the asset by following the url + # prune down the data using NATURAL_KEYS + # register the natural key dict + return {} + + return self._natural_keys[asset_url] + def enhance_asset(self, endpoint, asset, options): - fields = {key: asset[key] for key in options if key in asset} - fk_fields = {} - related_fields = {} + fields = { + key: asset[key] for key in options + if key in asset and options[key]['type'] != 'id' + } + + fk_fields = { + key: self.get_natural_key(asset['related'][key]) for key in options + if key in asset and options[key]['type'] == 'id' + } + + related = {} + + related_fields = {'related': related} if related else {} return dict(**fields, **fk_fields, **related_fields) def handle(self, client, parser): From 6162ff043931b3da75dd43d3cadcb85977e648e6 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 11 Feb 2020 15:16:41 -0500 Subject: [PATCH 048/494] Start to record the role membership but not really, since it still isn't fully capturing the role content_object --- awxkit/awxkit/cli/resource.py | 38 +++++++++++++++++++++++------------ 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 8d08ffdf3f..6af5513557 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -4,7 +4,7 @@ import sys from awxkit import api, config from awxkit.utils import to_str -from awxkit.api.pages import Page +from awxkit.api.pages import Page, TentativePage from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments from awxkit.cli.utils import CustomRegistryMeta, cprint @@ -201,15 +201,30 @@ class Export(CustomCommand): return endpoint.options().json['actions']['POST'] def register_natural_key(self, asset): + if asset['url'] in self._natural_keys: + return + natural_key = {'type': asset['type']} lookup = NATURAL_KEYS.get(asset['type']) if callable(lookup): natural_key.update(lookup(asset)) else: - natural_key.update((key, asset[key]) for key in lookup or ()) + natural_key.update((key, 
asset.get(key)) for key in lookup or ()) self._natural_keys[asset['url']] = natural_key + def get_natural_key(self, url=None, asset=None): + if url is None: + url = asset['url'] + if url not in self._natural_keys: + if asset is None: + # get the asset by following the url + raise Exception("Oops!") + + self.register_natural_key(asset) + + return self._natural_keys[url] + def get_assets(self, endpoint, value): if value: from .options import pk_or_name @@ -224,16 +239,6 @@ class Export(CustomCommand): return results - def get_natural_key(self, asset_url): - if asset_url not in self._natural_keys: - # FIXME: - # get the asset by following the url - # prune down the data using NATURAL_KEYS - # register the natural key dict - return {} - - return self._natural_keys[asset_url] - def enhance_asset(self, endpoint, asset, options): fields = { key: asset[key] for key in options @@ -241,11 +246,18 @@ class Export(CustomCommand): } fk_fields = { - key: self.get_natural_key(asset['related'][key]) for key in options + key: self.get_natural_key(url=asset['related'][key]) for key in options if key in asset and options[key]['type'] == 'id' } related = {} + for k, v in asset['related'].items(): + if k != 'roles': + continue + related_endpoint = TentativePage(v, endpoint.connection) + data = related_endpoint.get(all_pages=True).json + if 'results' in data: + related[k] = [self.get_natural_key(asset=x) for x in data['results']] related_fields = {'related': related} if related else {} return dict(**fields, **fk_fields, **related_fields) From fb066eb52e8bc4e5c446d5fbbee81754be8ddf58 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 17 Feb 2020 11:07:52 -0500 Subject: [PATCH 049/494] Fold the other methods doing API calls into get_assets() --- awxkit/awxkit/cli/resource.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 6af5513557..a4cc2e4af1 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -225,11 +225,12 @@ class Export(CustomCommand): return self._natural_keys[url] - def get_assets(self, endpoint, value): + def get_assets(self, resource, value): + endpoint = getattr(self.v2, resource) if value: from .options import pk_or_name - pk = pk_or_name(client.v2, resource, value) + pk = pk_or_name(self.v2, resource, value) results = endpoint.get(id=pk).json['results'] else: results = endpoint.get(all_pages=True).json['results'] @@ -237,7 +238,8 @@ class Export(CustomCommand): for asset in results: self.register_natural_key(asset) - return results + options = self.get_resource_options(endpoint) + return [self.enhance_asset(endpoint, asset, options) for asset in results] def enhance_asset(self, endpoint, asset, options): fields = { @@ -275,16 +277,13 @@ class Export(CustomCommand): # If no resource flags are explicitly used, export everything. 
all_resources = all(getattr(parsed, resource, None) is None for resource in EXPORTABLE_RESOURCES) + self.v2 = client.v2 + data = {} for resource in EXPORTABLE_RESOURCES: value = getattr(parsed, resource, None) - if value is None and not all_resources: - continue - endpoint = getattr(client.v2, resource) - options = self.get_resource_options(endpoint) - assets = self.get_assets(endpoint, value) - - data[resource] = [self.enhance_asset(endpoint, asset, options) for asset in assets] + if all_resources or value is not None: + data[resource] = self.get_assets(resource, value) return data From e4383c505f2cab71fdff3fd8cf9509ade0240d93 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 17 Feb 2020 14:37:11 -0500 Subject: [PATCH 050/494] Make more use of the functionality of Page in get_assets and related methods. Also, rename get_resource_options -> get_options and enhance_asset -> serialize_asset. --- awxkit/awxkit/cli/resource.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index a4cc2e4af1..98d6cd8708 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -197,7 +197,7 @@ class Export(CustomCommand): # 3) the resource flag is used with an argument, and the attr will be that argument's value resources.add_argument('--{}'.format(resource), nargs='?', const='') - def get_resource_options(self, endpoint): + def get_options(self, endpoint): return endpoint.options().json['actions']['POST'] def register_natural_key(self, asset): @@ -231,32 +231,31 @@ class Export(CustomCommand): from .options import pk_or_name pk = pk_or_name(self.v2, resource, value) - results = endpoint.get(id=pk).json['results'] + results = endpoint.get(id=pk).results else: - results = endpoint.get(all_pages=True).json['results'] + results = endpoint.get(all_pages=True).results for asset in results: - self.register_natural_key(asset) + self.register_natural_key(asset.json) - options = self.get_resource_options(endpoint) - return [self.enhance_asset(endpoint, asset, options) for asset in results] + options = self.get_options(endpoint) + return [self.serialize_asset(asset, options) for asset in results] - def enhance_asset(self, endpoint, asset, options): + def serialize_asset(self, asset, options): fields = { - key: asset[key] for key in options - if key in asset and options[key]['type'] != 'id' + key: asset.json[key] for key in options + if key in asset.json and options[key]['type'] != 'id' } fk_fields = { - key: self.get_natural_key(url=asset['related'][key]) for key in options - if key in asset and options[key]['type'] == 'id' + key: self.get_natural_key(url=asset.related[key].endpoint) for key in options + if key in asset.json and options[key]['type'] == 'id' } related = {} - for k, v in asset['related'].items(): + for k, related_endpoint in asset.related.items(): if k != 'roles': continue - related_endpoint = TentativePage(v, endpoint.connection) data = related_endpoint.get(all_pages=True).json if 'results' in data: related[k] = [self.get_natural_key(asset=x) for x in data['results']] From 0877e5305c60301a13a843742cc3f0253eb2a642 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 17 Feb 2020 15:16:21 -0500 Subject: [PATCH 051/494] Modify get_natural_key to work on Pages --- awxkit/awxkit/cli/resource.py | 34 ++++++++-------------------------- 1 file changed, 8 insertions(+), 26 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 
98d6cd8708..f4f3a837a6 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -200,30 +200,15 @@ class Export(CustomCommand): def get_options(self, endpoint): return endpoint.options().json['actions']['POST'] - def register_natural_key(self, asset): - if asset['url'] in self._natural_keys: - return - - natural_key = {'type': asset['type']} - lookup = NATURAL_KEYS.get(asset['type']) + def get_natural_key(self, page): + natural_key = {'type': page.type} + lookup = NATURAL_KEYS.get(page.type) if callable(lookup): - natural_key.update(lookup(asset)) + natural_key.update(lookup(page)) else: - natural_key.update((key, asset.get(key)) for key in lookup or ()) + natural_key.update((key, page[key]) for key in lookup or ()) - self._natural_keys[asset['url']] = natural_key - - def get_natural_key(self, url=None, asset=None): - if url is None: - url = asset['url'] - if url not in self._natural_keys: - if asset is None: - # get the asset by following the url - raise Exception("Oops!") - - self.register_natural_key(asset) - - return self._natural_keys[url] + return natural_key def get_assets(self, resource, value): endpoint = getattr(self.v2, resource) @@ -235,9 +220,6 @@ class Export(CustomCommand): else: results = endpoint.get(all_pages=True).results - for asset in results: - self.register_natural_key(asset.json) - options = self.get_options(endpoint) return [self.serialize_asset(asset, options) for asset in results] @@ -248,7 +230,7 @@ class Export(CustomCommand): } fk_fields = { - key: self.get_natural_key(url=asset.related[key].endpoint) for key in options + key: self.get_natural_key(asset.related[key].get()) for key in options if key in asset.json and options[key]['type'] == 'id' } @@ -258,7 +240,7 @@ class Export(CustomCommand): continue data = related_endpoint.get(all_pages=True).json if 'results' in data: - related[k] = [self.get_natural_key(asset=x) for x in data['results']] + related[k] = [self.get_natural_key(x) for x in data.results] related_fields = {'related': related} if related else {} return dict(**fields, **fk_fields, **related_fields) From 6f28361bf5d63148ec471bc8d2321cd0da6f3e7c Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 17 Feb 2020 15:37:12 -0500 Subject: [PATCH 052/494] Make serialize_asset use Pages to a greater extent --- awxkit/awxkit/cli/resource.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index f4f3a837a6..5499bd19c6 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -225,20 +225,20 @@ class Export(CustomCommand): def serialize_asset(self, asset, options): fields = { - key: asset.json[key] for key in options - if key in asset.json and options[key]['type'] != 'id' + key: asset[key] for key in options + if key in asset.json and key not in asset.related } fk_fields = { key: self.get_natural_key(asset.related[key].get()) for key in options - if key in asset.json and options[key]['type'] == 'id' + if key in asset.related } related = {} for k, related_endpoint in asset.related.items(): if k != 'roles': continue - data = related_endpoint.get(all_pages=True).json + data = related_endpoint.get(all_pages=True) if 'results' in data: related[k] = [self.get_natural_key(x) for x in data.results] From 372570ce8e9476b09d4bec866a581ed780072070 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 17 Feb 2020 16:04:51 -0500 Subject: [PATCH 053/494] Extract out get_natural_key as a standalone function --- 
awxkit/awxkit/cli/resource.py | 41 ++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 5499bd19c6..80a06c71aa 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -70,10 +70,31 @@ NATURAL_KEYS = { 'workflow_job_template': ('organization', 'name'), # related resources - 'role': ('name',), # FIXME: we also need the content_object, itself as a natural key representation + 'role': ('name', ':content_object'), } +def get_natural_key(page): + natural_key = {'type': page.type} + lookup = NATURAL_KEYS.get(page.type, ()) + + for key in lookup or (): + if key.startswith(':'): + # treat it like a special-case related object + related_objs = [ + related for name, related in page.related.items() + if name not in ('users', 'teams') + ] + if related_objs: + natural_key[key[1:]] = get_natural_key(related_objs[0].get()) + elif key in page.related: + natural_key[key] = get_natural_key(page.related[key].get()) + else: + natural_key[key] = page[key] + + return natural_key + + class CustomCommand(metaclass=CustomRegistryMeta): """Base class for implementing custom commands. @@ -183,10 +204,6 @@ class Export(CustomCommand): name = 'export' help_text = 'export resources from Tower' - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._natural_keys = {} - def extend_parser(self, parser): resources = parser.add_argument_group('resources') @@ -200,16 +217,6 @@ class Export(CustomCommand): def get_options(self, endpoint): return endpoint.options().json['actions']['POST'] - def get_natural_key(self, page): - natural_key = {'type': page.type} - lookup = NATURAL_KEYS.get(page.type) - if callable(lookup): - natural_key.update(lookup(page)) - else: - natural_key.update((key, page[key]) for key in lookup or ()) - - return natural_key - def get_assets(self, resource, value): endpoint = getattr(self.v2, resource) if value: @@ -230,7 +237,7 @@ class Export(CustomCommand): } fk_fields = { - key: self.get_natural_key(asset.related[key].get()) for key in options + key: get_natural_key(asset.related[key].get()) for key in options if key in asset.related } @@ -240,7 +247,7 @@ class Export(CustomCommand): continue data = related_endpoint.get(all_pages=True) if 'results' in data: - related[k] = [self.get_natural_key(x) for x in data.results] + related[k] = [get_natural_key(x) for x in data.results] related_fields = {'related': related} if related else {} return dict(**fields, **fk_fields, **related_fields) From f53920d3bf238ea721a5a1c2c129fca8fb6c9972 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 19 Feb 2020 14:11:27 -0500 Subject: [PATCH 054/494] Begin changing Import to work with the structure created by Export --- awxkit/awxkit/cli/resource.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 80a06c71aa..81eb58dac2 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -179,11 +179,12 @@ class Import(CustomCommand): name = 'import' help_text = 'import resources into Tower' - def create_resource(self, client, resource, asset): - api_resource = getattr(client.v2, resource) - if resource == 'users' and 'password' not in asset: - asset['password'] = 'password' - api_resource.post(asset) + def create_assets(self, resource, assets): + endpoint = getattr(self.v2, resource) + for asset in assets: + if resource == 'users' and 
'password' not in asset: + asset['password'] = 'password' + endpoint.post({k: v for k, v in asset.items() if k != 'related'}) def handle(self, client, parser): if client.help: @@ -192,10 +193,10 @@ class Import(CustomCommand): data = json.load(sys.stdin) client.authenticate() + self.v2 = client.v2 - for resource, assets in data.items(): - for asset in assets: - self.create_resource(client, resource, asset) + for resource, assets in data.items(): # FIXME: do a topological sort by dependencies + self.create_assets(resource, assets) return {} From 21e36ad19a0c15ba7c7b7e198f12ae1a0d932885 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 20 Feb 2020 13:46:03 -0500 Subject: [PATCH 055/494] Add the calculated natural key to the export data for each asset --- awxkit/awxkit/cli/resource.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 81eb58dac2..042a8e3572 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -4,7 +4,7 @@ import sys from awxkit import api, config from awxkit.utils import to_str -from awxkit.api.pages import Page, TentativePage +from awxkit.api.pages import Page from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments from awxkit.cli.utils import CustomRegistryMeta, cprint @@ -75,8 +75,8 @@ NATURAL_KEYS = { def get_natural_key(page): - natural_key = {'type': page.type} - lookup = NATURAL_KEYS.get(page.type, ()) + natural_key = {'type': page['type']} + lookup = NATURAL_KEYS.get(page['type'], ()) for key in lookup or (): if key.startswith(':'): @@ -236,6 +236,7 @@ class Export(CustomCommand): key: asset[key] for key in options if key in asset.json and key not in asset.related } + fields['natural_key'] = get_natural_key(asset) fk_fields = { key: get_natural_key(asset.related[key].get()) for key in options From 95b22bf05b08dda92ffbfa27bc77f723c73c995d Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 24 Feb 2020 14:34:55 -0500 Subject: [PATCH 056/494] Begin to deal with existing matching assets by using a registry allowing the lookup of Page objects by (frozen) natural keys. 
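As a side note on why the registry keys have to be frozen: a natural key is a (possibly nested) dict, which is not hashable, while a frozenset of its items is. A small self-contained sketch, with made-up team and organization values:

    def freeze_sketch(key):
        # mirrors the freeze() helper added below: recursively turn a dict
        # into a hashable frozenset of (key, value) pairs
        return frozenset(
            (k, freeze_sketch(v) if isinstance(v, dict) else v) for k, v in key.items()
        )

    registry = {}
    natural_key = {
        'type': 'team',
        'name': 'ops',
        'organization': {'type': 'organization', 'name': 'Default'},
    }
    registry[freeze_sketch(natural_key)] = '<Page object for the ops team>'

    # an equivalent key built later (e.g. from imported data) finds the same entry
    assert registry[freeze_sketch(dict(natural_key))] == '<Page object for the ops team>'
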
--- awxkit/awxkit/cli/resource.py | 47 +++++++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 042a8e3572..7f3e95e9ee 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -175,16 +175,51 @@ class Config(CustomCommand): } +def freeze(key): + return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) + + class Import(CustomCommand): name = 'import' help_text = 'import resources into Tower' + def __init__(self, *args, **kwargs): + super(Import, self).__init__(*args, **kwargs) + self._natural_key = {} + self._options = {} + def create_assets(self, resource, assets): endpoint = getattr(self.v2, resource) + options = self._options[resource] for asset in assets: + post_data = {} if resource == 'users' and 'password' not in asset: - asset['password'] = 'password' - endpoint.post({k: v for k, v in asset.items() if k != 'related'}) + post_data['password'] = 'password' + for field, value in asset.items(): + if k in ('related', 'natural_key'): + continue + if options[field]['type'] == 'id': + post_data[field] = self._natural_key[freeze(value)]['id'] # FIXME: may not be registered + else: + post_data[field] = value + + natural_key = freeze(asset['natural_key']) + if natural_key in self._natural_key: + page = self._natural_key[natural_key] + page = page.put(post_data) + else: + page = endpoint.post(post_data) + + self._natural_key[freeze(get_natural_key(page))] = page + + def register_existing_assets(self, resource): + endpoint = getattr(self.v2, resource) + options = endpoint.options().json['actions']['POST'] + self._options[resource] = options + + results = endpoint.get(all_pages=True).results + for asset in results: + self._natural_key[freeze(get_natural_key(asset))] = asset def handle(self, client, parser): if client.help: @@ -195,9 +230,17 @@ class Import(CustomCommand): client.authenticate() self.v2 = client.v2 + for resource in data: + self.register_existing_assets(resource) + for resource, assets in data.items(): # FIXME: do a topological sort by dependencies self.create_assets(resource, assets) + # should we delete existing unpatched assets? 
+ + # loop over the sorted assets + # resolve and add in the m2m relateds + return {} From 65e16dc7ae4ecd1abad446ca8cf5ab6f189160de Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 24 Feb 2020 17:04:30 -0500 Subject: [PATCH 057/494] Get and assign the related objects --- awxkit/awxkit/cli/resource.py | 54 ++++++++++++++++++++++++++++------- 1 file changed, 44 insertions(+), 10 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 7f3e95e9ee..82906fb3f9 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -3,6 +3,7 @@ import os import sys from awxkit import api, config +import awxkit.exceptions as exc from awxkit.utils import to_str from awxkit.api.pages import Page from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments @@ -188,6 +189,13 @@ class Import(CustomCommand): self._natural_key = {} self._options = {} + def get_by_natural_key(self, key, fetch=True): + frozen_key = freeze(key) + if frozen_key not in self._natural_key and fetch: + pass + + return self._natural_key.get(frozen_key) + def create_assets(self, resource, assets): endpoint = getattr(self.v2, resource) options = self._options[resource] @@ -196,19 +204,18 @@ class Import(CustomCommand): if resource == 'users' and 'password' not in asset: post_data['password'] = 'password' for field, value in asset.items(): - if k in ('related', 'natural_key'): + if field in ('related', 'natural_key'): continue if options[field]['type'] == 'id': - post_data[field] = self._natural_key[freeze(value)]['id'] # FIXME: may not be registered + post_data[field] = self.get_by_natural_key(value)['id'] else: post_data[field] = value - natural_key = freeze(asset['natural_key']) - if natural_key in self._natural_key: - page = self._natural_key[natural_key] - page = page.put(post_data) - else: + page = self.get_by_natural_key(asset['natural_key'], fetch=False) + if page is None: page = endpoint.post(post_data) + else: + page = page.put(post_data) self._natural_key[freeze(get_natural_key(page))] = page @@ -221,6 +228,33 @@ class Import(CustomCommand): for asset in results: self._natural_key[freeze(get_natural_key(asset))] = asset + def assign_roles(self, page, roles): + role_endpoint = page.json['related']['roles'] + for role in roles: + if 'content_object' not in role: + continue # admin role + obj_page = self.get_by_natural_key(role['content_object']) + if obj_page is not None: + role_page = obj_page.get_object_role(role['name'], by_name=True) + try: + role_endpoint.post({'id': role_page['id']}) + except exc.NoContent: # desired exception on successful (dis)association + pass + else: + pass # admin role + + def assign_related(self, page, name, related_set): + pass + + def assign_related_assets(self, resource, assets): + for asset in assets: + page = self.get_by_natural_key(asset['natural_key']) + for name, S in asset.get('related', {}).items(): + if name == 'roles': + self.assign_roles(page, S) + else: + self.assign_related(page, name, S) + def handle(self, client, parser): if client.help: parser.print_help() @@ -236,10 +270,10 @@ class Import(CustomCommand): for resource, assets in data.items(): # FIXME: do a topological sort by dependencies self.create_assets(resource, assets) - # should we delete existing unpatched assets? + # FIXME: should we delete existing unpatched assets? 
- # loop over the sorted assets - # resolve and add in the m2m relateds + for resource, assets in data.items(): + self.assign_related_assets(resource, assets) return {} From d191edcaf194e10ef79b5888867cb7e7df34180f Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 28 Feb 2020 14:21:15 -0500 Subject: [PATCH 058/494] Fix a Python 2 syntax error it doesn't like multiple **kwargs, apparently. --- awxkit/awxkit/cli/resource.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 82906fb3f9..821a74f27d 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -329,7 +329,10 @@ class Export(CustomCommand): related[k] = [get_natural_key(x) for x in data.results] related_fields = {'related': related} if related else {} - return dict(**fields, **fk_fields, **related_fields) + + fields.update(fk_fields) + fields.update(related_fields) + return fields def handle(self, client, parser): self.extend_parser(parser) From 2c00d42ced76bed24a5cfc42a5ed6963ede34ae5 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 9 Mar 2020 16:00:28 -0400 Subject: [PATCH 059/494] Only use a default password if an import user doesn't already exist --- awxkit/awxkit/cli/resource.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 821a74f27d..ad700cc23c 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -201,8 +201,6 @@ class Import(CustomCommand): options = self._options[resource] for asset in assets: post_data = {} - if resource == 'users' and 'password' not in asset: - post_data['password'] = 'password' for field, value in asset.items(): if field in ('related', 'natural_key'): continue @@ -213,6 +211,9 @@ class Import(CustomCommand): page = self.get_by_natural_key(asset['natural_key'], fetch=False) if page is None: + if resource == 'users': + # We should only impose a default password if the resource doesn't exist. 
+ post_data.setdefault('password', 'password') page = endpoint.post(post_data) else: page = page.put(post_data) From ea5b810e87ba044cf9740bc0277246f8a4da933c Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 11 Mar 2020 16:16:23 -0400 Subject: [PATCH 060/494] Sort the asset groups to be imported by their dependency relationships --- awxkit/awxkit/cli/resource.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index ad700cc23c..c30ec6fc56 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -1,3 +1,4 @@ +import itertools import json import os import sys @@ -5,6 +6,7 @@ import sys from awxkit import api, config import awxkit.exceptions as exc from awxkit.utils import to_str +from awxkit.api.mixins import has_create from awxkit.api.pages import Page from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments from awxkit.cli.utils import CustomRegistryMeta, cprint @@ -49,13 +51,13 @@ EXPORTABLE_RESOURCES = [ 'users', 'organizations', 'teams', - # 'credential_types', - # 'credentials', - # 'notification_templates', - # 'projects', - # 'inventory', - # 'job_templates', - # 'workflow_job_templates', + 'credential_types', + 'credentials', + 'notification_templates', + 'projects', + 'inventory', + 'job_templates', + 'workflow_job_templates', ] NATURAL_KEYS = { @@ -188,6 +190,8 @@ class Import(CustomCommand): super(Import, self).__init__(*args, **kwargs) self._natural_key = {} self._options = {} + self._resource_page = {} + self._page_resource = {} def get_by_natural_key(self, key, fetch=True): frozen_key = freeze(key) @@ -222,6 +226,9 @@ class Import(CustomCommand): def register_existing_assets(self, resource): endpoint = getattr(self.v2, resource) + self._resource_page[resource] = endpoint._create().__item_class__ + self._page_resource[self._resource_page[resource]] = resource + options = endpoint.options().json['actions']['POST'] self._options[resource] = options @@ -268,13 +275,15 @@ class Import(CustomCommand): for resource in data: self.register_existing_assets(resource) - for resource, assets in data.items(): # FIXME: do a topological sort by dependencies - self.create_assets(resource, assets) + for page_cls in itertools.chain(*has_create.page_creation_order(*self._page_resource.keys())): + resource = self._page_resource[page_cls] + cprint("importing {}".format(resource), 'red', file=client.stderr) + self.create_assets(resource, data.get(resource, [])) # FIXME: should we delete existing unpatched assets? 
- for resource, assets in data.items(): - self.assign_related_assets(resource, assets) + # for resource, assets in data.items(): + # self.assign_related_assets(resource, assets) return {} From 3f204659a8778cf59d32494c6720763723b6c5b2 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 13 Mar 2020 10:04:54 -0400 Subject: [PATCH 061/494] Temporarily disable export of resources that may not work yet --- awxkit/awxkit/cli/resource.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index c30ec6fc56..d404abb878 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -51,13 +51,13 @@ EXPORTABLE_RESOURCES = [ 'users', 'organizations', 'teams', - 'credential_types', - 'credentials', - 'notification_templates', - 'projects', - 'inventory', - 'job_templates', - 'workflow_job_templates', + # 'credential_types', + # 'credentials', + # 'notification_templates', + # 'projects', + # 'inventory', + # 'job_templates', + # 'workflow_job_templates', ] NATURAL_KEYS = { From 868aafb263804b176d73d216a2615e982da98d77 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 13 Mar 2020 14:24:29 -0400 Subject: [PATCH 062/494] Filter out managed credential types since we cannot patch them upon import. --- awxkit/awxkit/cli/resource.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index d404abb878..3f3e0d28e6 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -51,7 +51,7 @@ EXPORTABLE_RESOURCES = [ 'users', 'organizations', 'teams', - # 'credential_types', + 'credential_types', # 'credentials', # 'notification_templates', # 'projects', @@ -316,9 +316,14 @@ class Export(CustomCommand): results = endpoint.get(all_pages=True).results options = self.get_options(endpoint) - return [self.serialize_asset(asset, options) for asset in results] + assets = (self.serialize_asset(asset, options) for asset in results) + return [asset for asset in assets if asset is not None] def serialize_asset(self, asset, options): + # Drop any (credential_type) assets that are being managed by the Tower instance. + if asset.json.get('managed_by_tower'): + return None + fields = { key: asset[key] for key in options if key in asset.json and key not in asset.related From d20fa03034bfc19931f27aab6967eafb831e7a38 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 13 Mar 2020 15:43:11 -0400 Subject: [PATCH 063/494] Create new Import.dependent_resources method that yields up resource names, even ones that aren't explicitly in the import data, in dependency order. 
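
For a rough sense of what "dependency order" means here: the real mechanism in the diff below walks has_create.page_creation_order over page classes, but the effect is a topological sort of resource names so that anything a resource depends on is imported before it, even when that dependency was not named in the import data. The standalone sketch below illustrates only the idea, using Python's stdlib graphlib and an invented DEPENDENCIES map; dependency_order and DEPENDENCIES are not awxkit names.

    from graphlib import TopologicalSorter   # stdlib, Python 3.9+

    # Invented, simplified dependency map -- purely illustrative, not awxkit's real graph.
    DEPENDENCIES = {
        'organizations': [],
        'users': [],
        'credential_types': [],
        'teams': ['organizations'],
        'credentials': ['organizations', 'users', 'teams', 'credential_types'],
    }

    def dependency_order(requested):
        """Yield every requested resource plus its transitive dependencies,
        ordered so that dependencies always come before their dependents."""
        ts, seen, stack = TopologicalSorter(), set(), list(requested)
        while stack:
            name = stack.pop()
            if name in seen:
                continue
            seen.add(name)
            deps = DEPENDENCIES.get(name, [])
            ts.add(name, *deps)      # deps are predecessors of name
            stack.extend(deps)       # pull in dependencies not explicitly requested
        return list(ts.static_order())

    print(dependency_order(['credentials']))
    # e.g. ['organizations', 'users', 'credential_types', 'teams', 'credentials']
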
--- awxkit/awxkit/cli/resource.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 3f3e0d28e6..f82102a3ac 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -190,8 +190,6 @@ class Import(CustomCommand): super(Import, self).__init__(*args, **kwargs) self._natural_key = {} self._options = {} - self._resource_page = {} - self._page_resource = {} def get_by_natural_key(self, key, fetch=True): frozen_key = freeze(key) @@ -200,9 +198,14 @@ class Import(CustomCommand): return self._natural_key.get(frozen_key) - def create_assets(self, resource, assets): + def create_assets(self, data, resource): + if resource not in data: + return + cprint("importing {}".format(resource), 'red', file=client.stderr) + endpoint = getattr(self.v2, resource) options = self._options[resource] + assets = data[resource] for asset in assets: post_data = {} for field, value in asset.items(): @@ -226,9 +229,6 @@ class Import(CustomCommand): def register_existing_assets(self, resource): endpoint = getattr(self.v2, resource) - self._resource_page[resource] = endpoint._create().__item_class__ - self._page_resource[self._resource_page[resource]] = resource - options = endpoint.options().json['actions']['POST'] self._options[resource] = options @@ -263,6 +263,16 @@ class Import(CustomCommand): else: self.assign_related(page, name, S) + def dependent_resources(self, data): + page_resource = {} + for resource in data: + endpoint = getattr(self.v2, resource) + page_cls = endpoint._create().__item_class__ + page_resource[page_cls] = resource + + for page_cls in itertools.chain(*has_create.page_creation_order(*page_resource.keys())): + yield page_resource[page_cls] + def handle(self, client, parser): if client.help: parser.print_help() @@ -272,15 +282,10 @@ class Import(CustomCommand): client.authenticate() self.v2 = client.v2 - for resource in data: + for resource in self.dependent_resources(data): self.register_existing_assets(resource) - - for page_cls in itertools.chain(*has_create.page_creation_order(*self._page_resource.keys())): - resource = self._page_resource[page_cls] - cprint("importing {}".format(resource), 'red', file=client.stderr) - self.create_assets(resource, data.get(resource, [])) - - # FIXME: should we delete existing unpatched assets? + self.create_assets(data, resource) + # FIXME: should we delete existing unpatched assets? # for resource, assets in data.items(): # self.assign_related_assets(resource, assets) From 55f79a45699ac5cdd9c98e9a7438fa73e5f12ead Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 13 Mar 2020 16:47:44 -0400 Subject: [PATCH 064/494] Fix a couple of flaws - The dependency ordering may spit out page types that weren't in the import data set. Make sure to be able to map those to resources anyway. - freeze() needs to be able to deal with nullable foreign keys. 
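
For context on the second point: freeze() collapses a (possibly nested) natural-key dict into a hashable value so it can index the registry of already-seen pages, and a nullable foreign key surfaces as a natural key of None. A standalone demo of the patched helper (the function body is the one added in the diff below; the example keys are invented):

    def freeze(key):
        if key is None:          # nullable foreign key -> no natural key at all
            return None
        return frozenset((k, freeze(v) if isinstance(v, dict) else v)
                         for k, v in key.items())

    # Before this change, freeze(None) raised AttributeError on None.items().
    assert freeze(None) is None

    # Equivalent keys hash identically regardless of dict ordering, so they can
    # be used to look pages back up in the natural-key registry.
    a = freeze({'type': 'team', 'name': 'ops',
                'organization': {'type': 'organization', 'name': 'Default'}})
    b = freeze({'organization': {'name': 'Default', 'type': 'organization'},
                'name': 'ops', 'type': 'team'})
    assert a == b and hash(a) == hash(b)
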
--- awxkit/awxkit/cli/resource.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index f82102a3ac..c108427208 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -179,6 +179,8 @@ class Config(CustomCommand): def freeze(key): + if key is None: + return None return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) @@ -199,9 +201,9 @@ class Import(CustomCommand): return self._natural_key.get(frozen_key) def create_assets(self, data, resource): - if resource not in data: + if resource not in data or resource not in EXPORTABLE_RESOURCES: return - cprint("importing {}".format(resource), 'red', file=client.stderr) + cprint("importing {}".format(resource), 'red', file=self.client.stderr) endpoint = getattr(self.v2, resource) options = self._options[resource] @@ -212,7 +214,8 @@ class Import(CustomCommand): if field in ('related', 'natural_key'): continue if options[field]['type'] == 'id': - post_data[field] = self.get_by_natural_key(value)['id'] + page = self.get_by_natural_key(value) + post_data[field] = page['id'] if page is not None else None else: post_data[field] = value @@ -264,13 +267,11 @@ class Import(CustomCommand): self.assign_related(page, name, S) def dependent_resources(self, data): - page_resource = {} - for resource in data: - endpoint = getattr(self.v2, resource) - page_cls = endpoint._create().__item_class__ - page_resource[page_cls] = resource + page_resource = {getattr(self.v2, resource)._create().__item_class__: resource + for resource in self.v2.json} + data_pages = [getattr(self.v2, resource)._create().__item_class__ for resource in data] - for page_cls in itertools.chain(*has_create.page_creation_order(*page_resource.keys())): + for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)): yield page_resource[page_cls] def handle(self, client, parser): @@ -280,6 +281,7 @@ class Import(CustomCommand): data = json.load(sys.stdin) client.authenticate() + self.client = client self.v2 = client.v2 for resource in self.dependent_resources(data): From ad574eb896ab34a3b4c181eb9cd9df4cecb24e0f Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 13 Mar 2020 16:51:17 -0400 Subject: [PATCH 065/494] Enable credential export --- awxkit/awxkit/cli/resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index c108427208..737331f947 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -52,7 +52,7 @@ EXPORTABLE_RESOURCES = [ 'organizations', 'teams', 'credential_types', - # 'credentials', + 'credentials', # 'notification_templates', # 'projects', # 'inventory', From 6a9add4fe3e4749d9ea339c47d2b8c2adb100a3c Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 17 Mar 2020 15:33:28 -0400 Subject: [PATCH 066/494] Deal somewhat reasonably with missing fields as part of the natural key - JobTemplate.organization has recently been added, we need to support with and without - WorkflowJobTemplateNode is shortly going to get an identifier field, and we will need to support both with and without --- awxkit/awxkit/cli/resource.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 737331f947..35f562bb7e 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -69,7 +69,7 @@ 
NATURAL_KEYS = { 'notification_template': ('organization', 'name'), 'project': ('organization', 'name'), 'inventory': ('organization', 'name'), - 'job_template': ('name',), + 'job_template': ('organization', 'name'), 'workflow_job_template': ('organization', 'name'), # related resources @@ -92,9 +92,11 @@ def get_natural_key(page): natural_key[key[1:]] = get_natural_key(related_objs[0].get()) elif key in page.related: natural_key[key] = get_natural_key(page.related[key].get()) - else: + elif key in page: natural_key[key] = page[key] + if not natural_key: + return None return natural_key @@ -195,7 +197,7 @@ class Import(CustomCommand): def get_by_natural_key(self, key, fetch=True): frozen_key = freeze(key) - if frozen_key not in self._natural_key and fetch: + if frozen_key is not None and frozen_key not in self._natural_key and fetch: pass return self._natural_key.get(frozen_key) @@ -228,7 +230,12 @@ class Import(CustomCommand): else: page = page.put(post_data) - self._natural_key[freeze(get_natural_key(page))] = page + self.register_page(page) + + def register_page(self, page): + natural_key = freeze(get_natural_key(page)) + if natural_key is not None: + self._natural_key[natural_key] = page def register_existing_assets(self, resource): endpoint = getattr(self.v2, resource) @@ -236,8 +243,8 @@ class Import(CustomCommand): self._options[resource] = options results = endpoint.get(all_pages=True).results - for asset in results: - self._natural_key[freeze(get_natural_key(asset))] = asset + for page in results: + self.register_page(page) def assign_roles(self, page, roles): role_endpoint = page.json['related']['roles'] @@ -260,6 +267,7 @@ class Import(CustomCommand): def assign_related_assets(self, resource, assets): for asset in assets: page = self.get_by_natural_key(asset['natural_key']) + # FIXME: deal with `page is None` case for name, S in asset.get('related', {}).items(): if name == 'roles': self.assign_roles(page, S) From 19c92a705543ef1316da61e9cff44422c187a61a Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 17 Mar 2020 16:38:48 -0400 Subject: [PATCH 067/494] Enable notification templates --- awxkit/awxkit/cli/resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 35f562bb7e..d7ebe698d0 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -53,7 +53,7 @@ EXPORTABLE_RESOURCES = [ 'teams', 'credential_types', 'credentials', - # 'notification_templates', + 'notification_templates', # 'projects', # 'inventory', # 'job_templates', From e4146e9bc72beec6e4147c1590b9bfe983aeb895 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 20 Mar 2020 14:02:12 -0400 Subject: [PATCH 068/494] Move the export logic onto methods on the ApiV2 class making it easier to invoke programmatically. --- awxkit/awxkit/api/pages/api.py | 116 ++++++++++++++++++++++++++++++++- awxkit/awxkit/cli/resource.py | 116 ++------------------------------- 2 files changed, 119 insertions(+), 113 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 40348ec5d7..323afbd44a 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -3,6 +3,60 @@ from . import base from . 
import page +EXPORTABLE_RESOURCES = [ + 'users', + 'organizations', + 'teams', + 'credential_types', + 'credentials', + 'notification_templates', + # 'projects', + # 'inventory', + # 'job_templates', + # 'workflow_job_templates', +] + + +NATURAL_KEYS = { + 'user': ('username',), + 'organization': ('name',), + 'team': ('organization', 'name'), + 'credential_type': ('name', 'kind'), + 'credential': ('organization', 'name', 'credential_type'), + 'notification_template': ('organization', 'name'), + 'project': ('organization', 'name'), + 'inventory': ('organization', 'name'), + 'job_template': ('organization', 'name'), + 'workflow_job_template': ('organization', 'name'), + + # related resources + 'role': ('name', ':content_object'), +} + + +def get_natural_key(page): + natural_key = {'type': page['type']} + lookup = NATURAL_KEYS.get(page['type'], ()) + + for key in lookup or (): + if key.startswith(':'): + # treat it like a special-case related object + related_objs = [ + related for name, related in page.related.items() + if name not in ('users', 'teams') + ] + if related_objs: + natural_key[key[1:]] = get_natural_key(related_objs[0].get()) + elif key in page.related: + natural_key[key] = get_natural_key(page.related[key].get()) + elif key in page: + natural_key[key] = page[key] + + if not natural_key: + return None + return natural_key + + class Api(base.Base): pass @@ -13,7 +67,67 @@ page.register_page(resources.api, Api) class ApiV2(base.Base): - pass + def _get_options(self, endpoint): + return endpoint.options().json['actions']['POST'] + + def _serialize_asset(self, asset, options): + # Drop any (credential_type) assets that are being managed by the Tower instance. + if asset.json.get('managed_by_tower'): + return None + + fields = { + key: asset[key] for key in options + if key in asset.json and key not in asset.related + } + fields['natural_key'] = get_natural_key(asset) + + fk_fields = { + key: get_natural_key(asset.related[key].get()) for key in options + if key in asset.related + } + + related = {} + for k, related_endpoint in asset.related.items(): + if k != 'roles': + continue + data = related_endpoint.get(all_pages=True) + if 'results' in data: + related[k] = [get_natural_key(x) for x in data.results] + + related_fields = {'related': related} if related else {} + + fields.update(fk_fields) + fields.update(related_fields) + return fields + + def _get_assets(self, resource, value): + endpoint = getattr(self, resource) + if value: + from awxkit.cli.options import pk_or_name + + pk = pk_or_name(self, resource, value) + results = endpoint.get(id=pk).results + else: + results = endpoint.get(all_pages=True).results + + options = self._get_options(endpoint) + assets = (self._serialize_asset(asset, options) for asset in results) + return [asset for asset in assets if asset is not None] + + def export_assets(self, **kwargs): + # If no resource kwargs are explicitly used, export everything. 
+ all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES) + + data = {} + for resource in EXPORTABLE_RESOURCES: + value = kwargs.get(resource) + if all_resources or value is not None: + data[resource] = self._get_assets(resource, value) + + return data + + def import_assets(self): + pass page.register_page(resources.v2, ApiV2) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index d7ebe698d0..88a3118718 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -8,6 +8,7 @@ import awxkit.exceptions as exc from awxkit.utils import to_str from awxkit.api.mixins import has_create from awxkit.api.pages import Page +from awxkit.api.pages.api import EXPORTABLE_RESOURCES, get_natural_key from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments from awxkit.cli.utils import CustomRegistryMeta, cprint @@ -47,58 +48,6 @@ DEPRECATED_RESOURCES_REVERSE = dict( (v, k) for k, v in DEPRECATED_RESOURCES.items() ) -EXPORTABLE_RESOURCES = [ - 'users', - 'organizations', - 'teams', - 'credential_types', - 'credentials', - 'notification_templates', - # 'projects', - # 'inventory', - # 'job_templates', - # 'workflow_job_templates', -] - -NATURAL_KEYS = { - 'user': ('username',), - 'organization': ('name',), - 'team': ('organization', 'name'), - 'credential_type': ('name', 'kind'), - 'credential': ('organization', 'name', 'credential_type'), - 'notification_template': ('organization', 'name'), - 'project': ('organization', 'name'), - 'inventory': ('organization', 'name'), - 'job_template': ('organization', 'name'), - 'workflow_job_template': ('organization', 'name'), - - # related resources - 'role': ('name', ':content_object'), -} - - -def get_natural_key(page): - natural_key = {'type': page['type']} - lookup = NATURAL_KEYS.get(page['type'], ()) - - for key in lookup or (): - if key.startswith(':'): - # treat it like a special-case related object - related_objs = [ - related for name, related in page.related.items() - if name not in ('users', 'teams') - ] - if related_objs: - natural_key[key[1:]] = get_natural_key(related_objs[0].get()) - elif key in page.related: - natural_key[key] = get_natural_key(page.related[key].get()) - elif key in page: - natural_key[key] = page[key] - - if not natural_key: - return None - return natural_key - class CustomCommand(metaclass=CustomRegistryMeta): """Base class for implementing custom commands. @@ -317,53 +266,6 @@ class Export(CustomCommand): # 3) the resource flag is used with an argument, and the attr will be that argument's value resources.add_argument('--{}'.format(resource), nargs='?', const='') - def get_options(self, endpoint): - return endpoint.options().json['actions']['POST'] - - def get_assets(self, resource, value): - endpoint = getattr(self.v2, resource) - if value: - from .options import pk_or_name - - pk = pk_or_name(self.v2, resource, value) - results = endpoint.get(id=pk).results - else: - results = endpoint.get(all_pages=True).results - - options = self.get_options(endpoint) - assets = (self.serialize_asset(asset, options) for asset in results) - return [asset for asset in assets if asset is not None] - - def serialize_asset(self, asset, options): - # Drop any (credential_type) assets that are being managed by the Tower instance. 
- if asset.json.get('managed_by_tower'): - return None - - fields = { - key: asset[key] for key in options - if key in asset.json and key not in asset.related - } - fields['natural_key'] = get_natural_key(asset) - - fk_fields = { - key: get_natural_key(asset.related[key].get()) for key in options - if key in asset.related - } - - related = {} - for k, related_endpoint in asset.related.items(): - if k != 'roles': - continue - data = related_endpoint.get(all_pages=True) - if 'results' in data: - related[k] = [get_natural_key(x) for x in data.results] - - related_fields = {'related': related} if related else {} - - fields.update(fk_fields) - fields.update(related_fields) - return fields - def handle(self, client, parser): self.extend_parser(parser) @@ -371,21 +273,11 @@ class Export(CustomCommand): parser.print_help() raise SystemExit() - client.authenticate() parsed = parser.parse_known_args()[0] + kwargs = {resource: getattr(parsed, resource, None) for resource in EXPORTABLE_RESOURCES} - # If no resource flags are explicitly used, export everything. - all_resources = all(getattr(parsed, resource, None) is None for resource in EXPORTABLE_RESOURCES) - - self.v2 = client.v2 - - data = {} - for resource in EXPORTABLE_RESOURCES: - value = getattr(parsed, resource, None) - if all_resources or value is not None: - data[resource] = self.get_assets(resource, value) - - return data + client.authenticate() + return client.v2.export_assets(**kwargs) def parse_resource(client, skip_deprecated=False): From 3860c7597fb87675592ddaf7bb2f73ad15d395de Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 20 Mar 2020 14:33:31 -0400 Subject: [PATCH 069/494] Move the import logic into methods on the ApiV2 class --- awxkit/awxkit/api/pages/api.py | 127 ++++++++++++++++++++++++++++++--- awxkit/awxkit/cli/resource.py | 115 +---------------------------- 2 files changed, 120 insertions(+), 122 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 323afbd44a..ec1e246707 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,6 +1,10 @@ +import itertools + from awxkit.api.resources import resources +import awxkit.exceptions as exc from . import base from . 
import page +from ..mixins import has_create EXPORTABLE_RESOURCES = [ @@ -34,29 +38,35 @@ NATURAL_KEYS = { } -def get_natural_key(page): - natural_key = {'type': page['type']} - lookup = NATURAL_KEYS.get(page['type'], ()) +def get_natural_key(pg): + natural_key = {'type': pg['type']} + lookup = NATURAL_KEYS.get(pg['type'], ()) for key in lookup or (): if key.startswith(':'): # treat it like a special-case related object related_objs = [ - related for name, related in page.related.items() + related for name, related in pg.related.items() if name not in ('users', 'teams') ] if related_objs: natural_key[key[1:]] = get_natural_key(related_objs[0].get()) - elif key in page.related: - natural_key[key] = get_natural_key(page.related[key].get()) - elif key in page: - natural_key[key] = page[key] + elif key in pg.related: + natural_key[key] = get_natural_key(pg.related[key].get()) + elif key in pg: + natural_key[key] = pg[key] if not natural_key: return None return natural_key +def freeze(key): + if key is None: + return None + return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) + + class Api(base.Base): pass @@ -114,6 +124,97 @@ class ApiV2(base.Base): assets = (self._serialize_asset(asset, options) for asset in results) return [asset for asset in assets if asset is not None] + def _dependent_resources(self, data): + page_resource = {getattr(self, resource)._create().__item_class__: resource + for resource in self.json} + data_pages = [getattr(self, resource)._create().__item_class__ for resource in data] + + for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)): + yield page_resource[page_cls] + + def _register_page(self, page): + natural_key = freeze(get_natural_key(page)) + if natural_key is not None: + if getattr(self, '_natural_key', None) is None: + self._natural_key = {} + + self._natural_key[natural_key] = page + + def _register_existing_assets(self, resource): + endpoint = getattr(self, resource) + options = self._get_options(endpoint) + if getattr(self, '_options', None) is None: + self._options = {} + self._options[resource] = options + + results = endpoint.get(all_pages=True).results + for pg in results: + self._register_page(pg) + + def _get_by_natural_key(self, key, fetch=True): + frozen_key = freeze(key) + if frozen_key is not None and frozen_key not in self._natural_key and fetch: + pass # FIXME + + return self._natural_key.get(frozen_key) + + def _create_assets(self, data, resource): + if resource not in data or resource not in EXPORTABLE_RESOURCES: + return + + endpoint = getattr(self, resource) + options = self._options[resource] + assets = data[resource] + for asset in assets: + post_data = {} + for field, value in asset.items(): + if field not in options: + continue + if options[field]['type'] == 'id': + page = self._get_by_natural_key(value) + post_data[field] = page['id'] if page is not None else None + else: + post_data[field] = value + + page = self._get_by_natural_key(asset['natural_key'], fetch=False) + if page is None: + if resource == 'users': + # We should only impose a default password if the resource doesn't exist. 
+ post_data.setdefault('password', 'abc123') + page = endpoint.post(post_data) + else: + page = page.put(post_data) + + self._register_page(page) + + def _assign_related(self, page, name, related_set): + pass # FIXME + + def _assign_roles(self, page, roles): + role_endpoint = page.json['related']['roles'] + for role in roles: + if 'content_object' not in role: + continue # admin role + obj_page = self._get_by_natural_key(role['content_object']) + if obj_page is not None: + role_page = obj_page.get_object_role(role['name'], by_name=True) + try: + role_endpoint.post({'id': role_page['id']}) + except exc.NoContent: # desired exception on successful (dis)association + pass + else: + pass # admin role + + def assign_related_assets(self, resource, assets): + for asset in assets: + page = self._get_by_natural_key(asset['natural_key']) + # FIXME: deal with `page is None` case + for name, S in asset.get('related', {}).items(): + if name == 'roles': + self._assign_roles(page, S) + else: + self._assign_related(page, name, S) + def export_assets(self, **kwargs): # If no resource kwargs are explicitly used, export everything. all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES) @@ -126,8 +227,14 @@ class ApiV2(base.Base): return data - def import_assets(self): - pass + def import_assets(self, data): + for resource in self._dependent_resources(data): + self._register_existing_assets(resource) + self._create_assets(data, resource) + # FIXME: should we delete existing unpatched assets? + + # for resource, assets in data.items(): + # self.assign_related_assets(resource, assets) page.register_page(resources.v2, ApiV2) diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py index 88a3118718..f22795fab2 100644 --- a/awxkit/awxkit/cli/resource.py +++ b/awxkit/awxkit/cli/resource.py @@ -1,14 +1,11 @@ -import itertools import json import os import sys from awxkit import api, config -import awxkit.exceptions as exc from awxkit.utils import to_str -from awxkit.api.mixins import has_create from awxkit.api.pages import Page -from awxkit.api.pages.api import EXPORTABLE_RESOURCES, get_natural_key +from awxkit.api.pages.api import EXPORTABLE_RESOURCES from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments from awxkit.cli.utils import CustomRegistryMeta, cprint @@ -129,125 +126,19 @@ class Config(CustomCommand): } -def freeze(key): - if key is None: - return None - return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) - - class Import(CustomCommand): name = 'import' help_text = 'import resources into Tower' - def __init__(self, *args, **kwargs): - super(Import, self).__init__(*args, **kwargs) - self._natural_key = {} - self._options = {} - - def get_by_natural_key(self, key, fetch=True): - frozen_key = freeze(key) - if frozen_key is not None and frozen_key not in self._natural_key and fetch: - pass - - return self._natural_key.get(frozen_key) - - def create_assets(self, data, resource): - if resource not in data or resource not in EXPORTABLE_RESOURCES: - return - cprint("importing {}".format(resource), 'red', file=self.client.stderr) - - endpoint = getattr(self.v2, resource) - options = self._options[resource] - assets = data[resource] - for asset in assets: - post_data = {} - for field, value in asset.items(): - if field in ('related', 'natural_key'): - continue - if options[field]['type'] == 'id': - page = self.get_by_natural_key(value) - post_data[field] = page['id'] if page is not None else None - 
else: - post_data[field] = value - - page = self.get_by_natural_key(asset['natural_key'], fetch=False) - if page is None: - if resource == 'users': - # We should only impose a default password if the resource doesn't exist. - post_data.setdefault('password', 'password') - page = endpoint.post(post_data) - else: - page = page.put(post_data) - - self.register_page(page) - - def register_page(self, page): - natural_key = freeze(get_natural_key(page)) - if natural_key is not None: - self._natural_key[natural_key] = page - - def register_existing_assets(self, resource): - endpoint = getattr(self.v2, resource) - options = endpoint.options().json['actions']['POST'] - self._options[resource] = options - - results = endpoint.get(all_pages=True).results - for page in results: - self.register_page(page) - - def assign_roles(self, page, roles): - role_endpoint = page.json['related']['roles'] - for role in roles: - if 'content_object' not in role: - continue # admin role - obj_page = self.get_by_natural_key(role['content_object']) - if obj_page is not None: - role_page = obj_page.get_object_role(role['name'], by_name=True) - try: - role_endpoint.post({'id': role_page['id']}) - except exc.NoContent: # desired exception on successful (dis)association - pass - else: - pass # admin role - - def assign_related(self, page, name, related_set): - pass - - def assign_related_assets(self, resource, assets): - for asset in assets: - page = self.get_by_natural_key(asset['natural_key']) - # FIXME: deal with `page is None` case - for name, S in asset.get('related', {}).items(): - if name == 'roles': - self.assign_roles(page, S) - else: - self.assign_related(page, name, S) - - def dependent_resources(self, data): - page_resource = {getattr(self.v2, resource)._create().__item_class__: resource - for resource in self.v2.json} - data_pages = [getattr(self.v2, resource)._create().__item_class__ for resource in data] - - for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)): - yield page_resource[page_cls] - def handle(self, client, parser): if client.help: parser.print_help() raise SystemExit() data = json.load(sys.stdin) + client.authenticate() - self.client = client - self.v2 = client.v2 - - for resource in self.dependent_resources(data): - self.register_existing_assets(resource) - self.create_assets(data, resource) - # FIXME: should we delete existing unpatched assets? 
- - # for resource, assets in data.items(): - # self.assign_related_assets(resource, assets) + client.v2.import_assets(data) return {} From e0f6af47001b38e0a615960551bdfe7328c9e2e8 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 23 Mar 2020 16:42:34 -0400 Subject: [PATCH 070/494] Enable the export of several relation field types --- awxkit/awxkit/api/pages/api.py | 27 +++++++++++++++++++++++---- awxkit/awxkit/api/pages/page.py | 2 +- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index ec1e246707..92295dd8f6 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -21,6 +21,15 @@ EXPORTABLE_RESOURCES = [ ] +EXPORTABLE_RELATIONS = [ + 'Roles', + 'NotificationTemplates', + 'Labels', + 'SurveySpec', + 'WorkflowJobTemplateNodes', +] + + NATURAL_KEYS = { 'user': ('username',), 'organization': ('name',), @@ -35,6 +44,9 @@ NATURAL_KEYS = { # related resources 'role': ('name', ':content_object'), + 'notification_template': ('organization', 'name'), + 'label': ('organization', 'name'), # FIXME: label will need to be fully constructed from this + 'workflow_job_template_node': ('workflow_job_template', 'identifier'), } @@ -98,11 +110,18 @@ class ApiV2(base.Base): related = {} for k, related_endpoint in asset.related.items(): - if k != 'roles': + if not related_endpoint: + continue + if k == 'object_roles': + continue + rel = related_endpoint._create() + if rel.__class__.__name__ not in EXPORTABLE_RELATIONS: continue data = related_endpoint.get(all_pages=True) if 'results' in data: related[k] = [get_natural_key(x) for x in data.results] + else: + related[k] = data.json related_fields = {'related': related} if related else {} @@ -115,7 +134,7 @@ class ApiV2(base.Base): if value: from awxkit.cli.options import pk_or_name - pk = pk_or_name(self, resource, value) + pk = pk_or_name(self, resource, value) # TODO: decide whether to support multiple results = endpoint.get(id=pk).results else: results = endpoint.get(all_pages=True).results @@ -205,7 +224,7 @@ class ApiV2(base.Base): else: pass # admin role - def assign_related_assets(self, resource, assets): + def _assign_related_assets(self, resource, assets): for asset in assets: page = self._get_by_natural_key(asset['natural_key']) # FIXME: deal with `page is None` case @@ -234,7 +253,7 @@ class ApiV2(base.Base): # FIXME: should we delete existing unpatched assets? # for resource, assets in data.items(): - # self.assign_related_assets(resource, assets) + # self._assign_related_assets(resource, assets) page.register_page(resources.v2, ApiV2) diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 7e714bbb05..256424a214 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -273,7 +273,7 @@ class Page(object): def get(self, all_pages=False, **query_parameters): r = self.connection.get(self.endpoint, query_parameters) page = self.page_identity(r) - if all_pages and page.next: + if all_pages and getattr(page, 'next', None): paged_results = [r.json()['results']] while page.next: r = self.connection.get(self.next, query_parameters) From 01d575f833b63291d817997beb8d4a6bbfcd7aa9 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 24 Mar 2020 15:56:40 -0400 Subject: [PATCH 071/494] Enable the remaining top-level exportable resources and rearrange the import/export methods a bit. 
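
The rearranged export side keeps the same selection rule as before: if no resource keyword is passed, every exportable resource is exported; otherwise only the named ones are, with an empty string meaning "all of that resource" (mirroring the CLI flags declared with nargs='?', const=''). A standalone sketch of that rule, where resources_to_export is an invented name standing in for the selection done inside ApiV2.export_assets:

    EXPORTABLE_RESOURCES = [
        'users', 'organizations', 'teams', 'credential_types', 'credentials',
        'notification_templates', 'projects', 'inventory', 'job_templates',
        'workflow_job_templates',
    ]

    def resources_to_export(**kwargs):
        # No resource kwargs at all -> export everything exportable.
        all_resources = all(kwargs.get(r) is None for r in EXPORTABLE_RESOURCES)
        return {r: kwargs.get(r) for r in EXPORTABLE_RESOURCES
                if all_resources or kwargs.get(r) is not None}

    print(resources_to_export())                       # all ten resources, each value None
    print(resources_to_export(users='', teams='ops'))  # {'users': '', 'teams': 'ops'}
    # '' exports all users; 'ops' restricts teams to the one matching that name or pk.
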
--- awxkit/awxkit/api/pages/api.py | 50 ++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 92295dd8f6..ca93782e10 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -14,10 +14,10 @@ EXPORTABLE_RESOURCES = [ 'credential_types', 'credentials', 'notification_templates', - # 'projects', - # 'inventory', - # 'job_templates', - # 'workflow_job_templates', + 'projects', + 'inventory', + 'job_templates', + 'workflow_job_templates', ] @@ -89,9 +89,13 @@ page.register_page(resources.api, Api) class ApiV2(base.Base): + # Common import/export methods + def _get_options(self, endpoint): return endpoint.options().json['actions']['POST'] + # Export methods + def _serialize_asset(self, asset, options): # Drop any (credential_type) assets that are being managed by the Tower instance. if asset.json.get('managed_by_tower'): @@ -143,6 +147,20 @@ class ApiV2(base.Base): assets = (self._serialize_asset(asset, options) for asset in results) return [asset for asset in assets if asset is not None] + def export_assets(self, **kwargs): + # If no resource kwargs are explicitly used, export everything. + all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES) + + data = {} + for resource in EXPORTABLE_RESOURCES: + value = kwargs.get(resource) + if all_resources or value is not None: + data[resource] = self._get_assets(resource, value) + + return data + + # Import methods + def _dependent_resources(self, data): page_resource = {getattr(self, resource)._create().__item_class__: resource for resource in self.json} @@ -153,6 +171,8 @@ class ApiV2(base.Base): def _register_page(self, page): natural_key = freeze(get_natural_key(page)) + # FIXME: we need to keep a reference for the case where we + # don't have a natural key, so we can delete if natural_key is not None: if getattr(self, '_natural_key', None) is None: self._natural_key = {} @@ -206,9 +226,6 @@ class ApiV2(base.Base): self._register_page(page) - def _assign_related(self, page, name, related_set): - pass # FIXME - def _assign_roles(self, page, roles): role_endpoint = page.json['related']['roles'] for role in roles: @@ -224,6 +241,9 @@ class ApiV2(base.Base): else: pass # admin role + def _assign_related(self, page, name, related_set): + pass # FIXME + def _assign_related_assets(self, resource, assets): for asset in assets: page = self._get_by_natural_key(asset['natural_key']) @@ -234,26 +254,14 @@ class ApiV2(base.Base): else: self._assign_related(page, name, S) - def export_assets(self, **kwargs): - # If no resource kwargs are explicitly used, export everything. - all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES) - - data = {} - for resource in EXPORTABLE_RESOURCES: - value = kwargs.get(resource) - if all_resources or value is not None: - data[resource] = self._get_assets(resource, value) - - return data - def import_assets(self, data): for resource in self._dependent_resources(data): self._register_existing_assets(resource) self._create_assets(data, resource) # FIXME: should we delete existing unpatched assets? 
- # for resource, assets in data.items(): - # self._assign_related_assets(resource, assets) + for resource, assets in data.items(): + self._assign_related_assets(resource, assets) page.register_page(resources.v2, ApiV2) From 30a3e3e1720d6b8819feb4d0f21ebb3d43eae04d Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 24 Mar 2020 16:40:23 -0400 Subject: [PATCH 072/494] Deal with lack of permissions --- awxkit/awxkit/api/pages/api.py | 51 ++++++++++++++++++---------------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index ca93782e10..e1bf098eae 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -92,7 +92,7 @@ class ApiV2(base.Base): # Common import/export methods def _get_options(self, endpoint): - return endpoint.options().json['actions']['POST'] + return endpoint.options().json['actions'].get('POST', {}) # Export methods @@ -101,31 +101,34 @@ class ApiV2(base.Base): if asset.json.get('managed_by_tower'): return None - fields = { - key: asset[key] for key in options - if key in asset.json and key not in asset.related - } - fields['natural_key'] = get_natural_key(asset) + try: + fields = { + key: asset[key] for key in options + if key in asset.json and key not in asset.related + } + fields['natural_key'] = get_natural_key(asset) - fk_fields = { - key: get_natural_key(asset.related[key].get()) for key in options - if key in asset.related - } + fk_fields = { + key: get_natural_key(asset.related[key].get()) for key in options + if key in asset.related + } - related = {} - for k, related_endpoint in asset.related.items(): - if not related_endpoint: - continue - if k == 'object_roles': - continue - rel = related_endpoint._create() - if rel.__class__.__name__ not in EXPORTABLE_RELATIONS: - continue - data = related_endpoint.get(all_pages=True) - if 'results' in data: - related[k] = [get_natural_key(x) for x in data.results] - else: - related[k] = data.json + related = {} + for k, related_endpoint in asset.related.items(): + if not related_endpoint: + continue + if k == 'object_roles': + continue + rel = related_endpoint._create() + if rel.__class__.__name__ not in EXPORTABLE_RELATIONS: + continue + data = related_endpoint.get(all_pages=True) + if 'results' in data: + related[k] = [get_natural_key(x) for x in data.results] + else: + related[k] = data.json + except exc.Forbidden: + return None related_fields = {'related': related} if related else {} From a1f7d0b78163fa231e7529bc288be7545c0792ca Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 27 Mar 2020 11:06:32 -0400 Subject: [PATCH 073/494] Fix a bug with inventory.variables Using Page[key] instead of Page.json[key] causes inner json blob strings to automatically get parsed, which is not what we want with this field. --- awxkit/awxkit/api/pages/api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index e1bf098eae..2bb63df3b1 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -102,8 +102,9 @@ class ApiV2(base.Base): return None try: + # Note: doing asset[key] automatically parses json blob strings, which can be a problem. 
fields = { - key: asset[key] for key in options + key: asset.json[key] for key in options if key in asset.json and key not in asset.related } fields['natural_key'] = get_natural_key(asset) From 972d3ab53551606acea06a9d0d1ed39499b974ad Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 27 Mar 2020 16:08:45 -0400 Subject: [PATCH 074/494] Export full related objects under some conditions --- awxkit/awxkit/api/pages/api.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 2bb63df3b1..342c36c56c 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -24,6 +24,10 @@ EXPORTABLE_RESOURCES = [ EXPORTABLE_RELATIONS = [ 'Roles', 'NotificationTemplates', +] + + +EXPORTABLE_DEPENDENT_OBJECTS = [ 'Labels', 'SurveySpec', 'WorkflowJobTemplateNodes', @@ -105,7 +109,7 @@ class ApiV2(base.Base): # Note: doing asset[key] automatically parses json blob strings, which can be a problem. fields = { key: asset.json[key] for key in options - if key in asset.json and key not in asset.related + if key in asset.json and key not in asset.related and key != 'id' } fields['natural_key'] = get_natural_key(asset) @@ -115,19 +119,29 @@ class ApiV2(base.Base): } related = {} - for k, related_endpoint in asset.related.items(): - if not related_endpoint: + for key, related_endpoint in asset.related.items(): + if key in asset.json or not related_endpoint: continue - if k == 'object_roles': + if key == 'object_roles': continue rel = related_endpoint._create() - if rel.__class__.__name__ not in EXPORTABLE_RELATIONS: + + if rel.__class__.__name__ in EXPORTABLE_RELATIONS: + by_natural_key = True + elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: + by_natural_key = False + else: continue + data = related_endpoint.get(all_pages=True) if 'results' in data: - related[k] = [get_natural_key(x) for x in data.results] + related_options = self._get_options(related_endpoint) + related[key] = [ + get_natural_key(x) if by_natural_key else self._serialize_asset(x, related_options) + for x in data.results + ] else: - related[k] = data.json + related[key] = data.json except exc.Forbidden: return None From 07ba521b8bc959f2d7a9aab7f368f4c35da8877e Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 27 Mar 2020 16:29:31 -0400 Subject: [PATCH 075/494] Enable schedules --- awxkit/awxkit/api/pages/api.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 342c36c56c..8b45213459 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -31,6 +31,7 @@ EXPORTABLE_DEPENDENT_OBJECTS = [ 'Labels', 'SurveySpec', 'WorkflowJobTemplateNodes', + 'Schedules', ] @@ -51,6 +52,7 @@ NATURAL_KEYS = { 'notification_template': ('organization', 'name'), 'label': ('organization', 'name'), # FIXME: label will need to be fully constructed from this 'workflow_job_template_node': ('workflow_job_template', 'identifier'), + 'schedule': ('unified_job_template', 'name'), } From e92c8cfdccee15e19e4c47a55749414d978713cb Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 30 Mar 2020 16:07:12 -0400 Subject: [PATCH 076/494] Remove the natural key lookup dict and make each Page responsible instead --- awxkit/awxkit/api/pages/api.py | 55 ++----------------- awxkit/awxkit/api/pages/credentials.py | 3 + awxkit/awxkit/api/pages/inventory.py | 1 + awxkit/awxkit/api/pages/job_templates.py | 1 + awxkit/awxkit/api/pages/labels.py 
| 1 + .../api/pages/notification_templates.py | 1 + awxkit/awxkit/api/pages/organizations.py | 2 + awxkit/awxkit/api/pages/page.py | 21 +++++++ awxkit/awxkit/api/pages/projects.py | 1 + awxkit/awxkit/api/pages/roles.py | 14 ++++- awxkit/awxkit/api/pages/schedules.py | 2 +- awxkit/awxkit/api/pages/teams.py | 1 + awxkit/awxkit/api/pages/users.py | 2 + .../api/pages/workflow_job_template_nodes.py | 1 + .../api/pages/workflow_job_templates.py | 1 + awxkit/awxkit/exceptions.py | 5 ++ 16 files changed, 61 insertions(+), 51 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 8b45213459..a79e49d57e 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -35,50 +35,6 @@ EXPORTABLE_DEPENDENT_OBJECTS = [ ] -NATURAL_KEYS = { - 'user': ('username',), - 'organization': ('name',), - 'team': ('organization', 'name'), - 'credential_type': ('name', 'kind'), - 'credential': ('organization', 'name', 'credential_type'), - 'notification_template': ('organization', 'name'), - 'project': ('organization', 'name'), - 'inventory': ('organization', 'name'), - 'job_template': ('organization', 'name'), - 'workflow_job_template': ('organization', 'name'), - - # related resources - 'role': ('name', ':content_object'), - 'notification_template': ('organization', 'name'), - 'label': ('organization', 'name'), # FIXME: label will need to be fully constructed from this - 'workflow_job_template_node': ('workflow_job_template', 'identifier'), - 'schedule': ('unified_job_template', 'name'), -} - - -def get_natural_key(pg): - natural_key = {'type': pg['type']} - lookup = NATURAL_KEYS.get(pg['type'], ()) - - for key in lookup or (): - if key.startswith(':'): - # treat it like a special-case related object - related_objs = [ - related for name, related in pg.related.items() - if name not in ('users', 'teams') - ] - if related_objs: - natural_key[key[1:]] = get_natural_key(related_objs[0].get()) - elif key in pg.related: - natural_key[key] = get_natural_key(pg.related[key].get()) - elif key in pg: - natural_key[key] = pg[key] - - if not natural_key: - return None - return natural_key - - def freeze(key): if key is None: return None @@ -113,10 +69,11 @@ class ApiV2(base.Base): key: asset.json[key] for key in options if key in asset.json and key not in asset.related and key != 'id' } - fields['natural_key'] = get_natural_key(asset) + fields['natural_key'] = asset.get_natural_key() fk_fields = { - key: get_natural_key(asset.related[key].get()) for key in options + # FIXME: use caching by url + key: asset.related[key].get().get_natural_key() for key in options if key in asset.related } @@ -124,7 +81,7 @@ class ApiV2(base.Base): for key, related_endpoint in asset.related.items(): if key in asset.json or not related_endpoint: continue - if key == 'object_roles': + if key == 'object_roles': # FIXME continue rel = related_endpoint._create() @@ -139,7 +96,7 @@ class ApiV2(base.Base): if 'results' in data: related_options = self._get_options(related_endpoint) related[key] = [ - get_natural_key(x) if by_natural_key else self._serialize_asset(x, related_options) + x.get_natural_key() if by_natural_key else self._serialize_asset(x, related_options) for x in data.results ] else: @@ -190,7 +147,7 @@ class ApiV2(base.Base): yield page_resource[page_cls] def _register_page(self, page): - natural_key = freeze(get_natural_key(page)) + natural_key = freeze(page.get_natural_key()) # FIXME: we need to keep a reference for the case where we # don't have a natural key, so we can delete 
if natural_key is not None: diff --git a/awxkit/awxkit/api/pages/credentials.py b/awxkit/awxkit/api/pages/credentials.py index 8a5e7e0eca..88f89dd3b3 100644 --- a/awxkit/awxkit/api/pages/credentials.py +++ b/awxkit/awxkit/api/pages/credentials.py @@ -149,6 +149,8 @@ def get_payload_field_and_value_from_kwargs_or_config_cred( class CredentialType(HasCreate, base.Base): + NATURAL_KEY = ('name', 'kind') + def silent_delete(self): if not self.managed_by_tower: return super(CredentialType, self).silent_delete() @@ -204,6 +206,7 @@ class Credential(HasCopy, HasCreate, base.Base): dependencies = [CredentialType] optional_dependencies = [Organization, User, Team] + NATURAL_KEY = ('organization', 'name', 'credential_type') def payload( self, diff --git a/awxkit/awxkit/api/pages/inventory.py b/awxkit/awxkit/api/pages/inventory.py index 2b057153d3..6d89fcba3a 100644 --- a/awxkit/awxkit/api/pages/inventory.py +++ b/awxkit/awxkit/api/pages/inventory.py @@ -32,6 +32,7 @@ log = logging.getLogger(__name__) class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base): dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def print_ini(self): """Print an ini version of the inventory""" diff --git a/awxkit/awxkit/api/pages/job_templates.py b/awxkit/awxkit/api/pages/job_templates.py index ad59bfb742..cd45fc0c87 100644 --- a/awxkit/awxkit/api/pages/job_templates.py +++ b/awxkit/awxkit/api/pages/job_templates.py @@ -24,6 +24,7 @@ class JobTemplate( UnifiedJobTemplate): optional_dependencies = [Inventory, Credential, Project] + NATURAL_KEY = ('organization', 'name') def launch(self, payload={}): """Launch the job_template using related->launch endpoint.""" diff --git a/awxkit/awxkit/api/pages/labels.py b/awxkit/awxkit/api/pages/labels.py index f545ef3776..a76b6920a5 100644 --- a/awxkit/awxkit/api/pages/labels.py +++ b/awxkit/awxkit/api/pages/labels.py @@ -9,6 +9,7 @@ from . import page class Label(HasCreate, base.Base): dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def silent_delete(self): """Label pages do not support DELETE requests. Here, we override the base page object diff --git a/awxkit/awxkit/api/pages/notification_templates.py b/awxkit/awxkit/api/pages/notification_templates.py index f69bb7b8a7..ff192d4433 100644 --- a/awxkit/awxkit/api/pages/notification_templates.py +++ b/awxkit/awxkit/api/pages/notification_templates.py @@ -24,6 +24,7 @@ notification_types = ( class NotificationTemplate(HasCopy, HasCreate, base.Base): dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def test(self): """Create test notification""" diff --git a/awxkit/awxkit/api/pages/organizations.py b/awxkit/awxkit/api/pages/organizations.py index cdc24083b8..413ecf4961 100644 --- a/awxkit/awxkit/api/pages/organizations.py +++ b/awxkit/awxkit/api/pages/organizations.py @@ -8,6 +8,8 @@ from . 
import page class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base): + NATURAL_KEY = ('name',) + def add_admin(self, user): if isinstance(user, page.Page): user = user.json diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 256424a214..9effa8ef64 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -317,6 +317,24 @@ class Page(object): page_cls = get_registered_page(endpoint) return page_cls(self.connection, endpoint=endpoint).get(**kw) + def get_natural_key(self): + if not getattr(self, 'NATURAL_KEY', None): + raise exc.NoNaturalKey( + "Page does not have a natural key: {}".format(getattr(self, 'endpoint', repr(self.__class__))) + ) + natural_key = {} + for key in self.NATURAL_KEY: + if key in self.related: + # FIXME: use caching by url + natural_key[key] = self.related[key].get().get_natural_key() + elif key in self: + natural_key[key] = self[key] + if not natural_key: + return None + + natural_key['type'] = self['type'] + return natural_key + _exception_map = {http.NO_CONTENT: exc.NoContent, http.NOT_FOUND: exc.NotFound, @@ -376,6 +394,9 @@ class PageList(object): def create(self, *a, **kw): return self.__item_class__(self.connection).create(*a, **kw) + def get_natural_key(self): + raise exc.NoNaturalKey + class TentativePage(str): diff --git a/awxkit/awxkit/api/pages/projects.py b/awxkit/awxkit/api/pages/projects.py index 584c151f78..e40191260c 100644 --- a/awxkit/awxkit/api/pages/projects.py +++ b/awxkit/awxkit/api/pages/projects.py @@ -14,6 +14,7 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate): optional_dependencies = [Credential, Organization] optional_schedule_fields = tuple() + NATURAL_KEY = ('organization', 'name') def payload(self, organization, scm_type='git', **kwargs): payload = PseudoNamespace( diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py index f8d6e39f70..a08d670840 100644 --- a/awxkit/awxkit/api/pages/roles.py +++ b/awxkit/awxkit/api/pages/roles.py @@ -5,7 +5,19 @@ from . import page class Role(base.Base): - pass + NATURAL_KEY = ('name',) + + def get_natural_key(self): + natural_key = super(Role, self).get_natural_key() + related_objs = [ + related for name, related in self.related.items() + if name not in ('users', 'teams') + ] + if related_objs: + # FIXME: use caching by url + natural_key['content_object'] = related_objs[0].get().get_natural_key() + + return natural_key page.register_page(resources.role, Role) diff --git a/awxkit/awxkit/api/pages/schedules.py b/awxkit/awxkit/api/pages/schedules.py index b0b21645eb..8603b2ad5b 100644 --- a/awxkit/awxkit/api/pages/schedules.py +++ b/awxkit/awxkit/api/pages/schedules.py @@ -8,7 +8,7 @@ from . import base class Schedule(UnifiedJob): - pass + NATURAL_KEY = ('unified_job_template', 'name') page.register_page([resources.schedule, diff --git a/awxkit/awxkit/api/pages/teams.py b/awxkit/awxkit/api/pages/teams.py index fc1e9de3f8..cb5577b5b2 100644 --- a/awxkit/awxkit/api/pages/teams.py +++ b/awxkit/awxkit/api/pages/teams.py @@ -11,6 +11,7 @@ from . import page class Team(HasCreate, base.Base): dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def add_user(self, user): if isinstance(user, page.Page): diff --git a/awxkit/awxkit/api/pages/users.py b/awxkit/awxkit/api/pages/users.py index 4039ef2e9d..22ab78dd11 100644 --- a/awxkit/awxkit/api/pages/users.py +++ b/awxkit/awxkit/api/pages/users.py @@ -9,6 +9,8 @@ from . 
import page class User(HasCreate, base.Base): + NATURAL_KEY = ('username',) + def payload(self, **kwargs): payload = PseudoNamespace( username=kwargs.get('username') or 'User-{}'.format( diff --git a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py index 01e0b41e0a..1b61754928 100644 --- a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py +++ b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py @@ -10,6 +10,7 @@ from . import page class WorkflowJobTemplateNode(HasCreate, base.Base): dependencies = [WorkflowJobTemplate, UnifiedJobTemplate] + NATURAL_KEY = ('workflow_job_template', 'identifier') def payload(self, workflow_job_template, unified_job_template, **kwargs): if not unified_job_template: diff --git a/awxkit/awxkit/api/pages/workflow_job_templates.py b/awxkit/awxkit/api/pages/workflow_job_templates.py index 6a28891e04..17f3b56342 100644 --- a/awxkit/awxkit/api/pages/workflow_job_templates.py +++ b/awxkit/awxkit/api/pages/workflow_job_templates.py @@ -13,6 +13,7 @@ from . import page class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, UnifiedJobTemplate): optional_dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def launch(self, payload={}): """Launch using related->launch endpoint.""" diff --git a/awxkit/awxkit/exceptions.py b/awxkit/awxkit/exceptions.py index c1ff01719a..cf26097cef 100644 --- a/awxkit/awxkit/exceptions.py +++ b/awxkit/awxkit/exceptions.py @@ -101,3 +101,8 @@ class UnexpectedAWXState(Common): class IsMigrating(Common): pass + + +class NoNaturalKey(Common): + + pass From 329293dbf00d374d0e2364fb09db9c515e33ab14 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 30 Mar 2020 16:21:17 -0400 Subject: [PATCH 077/494] Tentatively enable inventory sources --- awxkit/awxkit/api/pages/api.py | 1 + awxkit/awxkit/api/pages/inventory.py | 1 + 2 files changed, 2 insertions(+) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index a79e49d57e..9eb0d86942 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -16,6 +16,7 @@ EXPORTABLE_RESOURCES = [ 'notification_templates', 'projects', 'inventory', + 'inventory_sources', 'job_templates', 'workflow_job_templates', ] diff --git a/awxkit/awxkit/api/pages/inventory.py b/awxkit/awxkit/api/pages/inventory.py index 6d89fcba3a..e00f0d329a 100644 --- a/awxkit/awxkit/api/pages/inventory.py +++ b/awxkit/awxkit/api/pages/inventory.py @@ -474,6 +474,7 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate): optional_schedule_fields = tuple() dependencies = [Inventory] optional_dependencies = [Credential, InventoryScript, Project] + NATURAL_KEY = ('organization', 'name', 'inventory') def payload( self, From 0deacc4391245662e2357934392c8f3d7ecc8ce2 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 1 Apr 2020 11:32:34 -0400 Subject: [PATCH 078/494] If attempting to link to a resource that is not yet complete, wait --- awxkit/awxkit/api/pages/api.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 9eb0d86942..905aa3d152 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -173,7 +173,14 @@ class ApiV2(base.Base): if frozen_key is not None and frozen_key not in self._natural_key and fetch: pass # FIXME - return self._natural_key.get(frozen_key) + from awxkit.api.mixins import has_status + + _page = 
self._natural_key.get(frozen_key) + if isinstance(_page, has_status.HasStatus) and not _page.is_completed: + _page.wait_until_completed() + _page = _page.get() + self._natural_key[frozen_key] = _page + return _page def _create_assets(self, data, resource): if resource not in data or resource not in EXPORTABLE_RESOURCES: From 4262dd38ba697f0746888081baa06f495f175aa8 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 1 Apr 2020 11:50:37 -0400 Subject: [PATCH 079/494] Change the NoNaturalKey exception to no longer derive from Common which seems to be entirely use for response exceptions. Maybe rename Common? --- awxkit/awxkit/exceptions.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/exceptions.py b/awxkit/awxkit/exceptions.py index cf26097cef..99d75c50d6 100644 --- a/awxkit/awxkit/exceptions.py +++ b/awxkit/awxkit/exceptions.py @@ -103,6 +103,11 @@ class IsMigrating(Common): pass -class NoNaturalKey(Common): +class ImportExportError(Exception): + + pass + + +class NoNaturalKey(ImportExportError): pass From e053a58223ea9866ee8d7f96eab2af35a0b34d44 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 1 Apr 2020 15:34:42 -0400 Subject: [PATCH 080/494] Handle some more bad cases when doing OPTIONS calls - deprecated endpoints - read-only endpoints - insufficient privileges The latter case currently just drops it on the floor, but ought to do something better. --- awxkit/awxkit/api/pages/api.py | 40 +++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 905aa3d152..a3c2144b5b 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -54,8 +54,26 @@ class ApiV2(base.Base): # Common import/export methods - def _get_options(self, endpoint): - return endpoint.options().json['actions'].get('POST', {}) + def _get_options(self, _page): + if getattr(self, '_options', None) is None: + self._options = {} + + if isinstance(_page, page.TentativePage): + url = str(_page) + else: + url = _page.url + + if url in self._options: + return self._options[url] + + options = _page.options() + warning = options.r.headers.get('Warning', '') + if '299' in warning and 'deprecated' in warning: + return self._options.setdefault(url, None) + if 'POST' not in options.r.headers.get('Allow', ''): + return self._options.setdefault(url, None) + + return self._options.setdefault(url, options.json['actions'].get('POST', {})) # Export methods @@ -63,6 +81,8 @@ class ApiV2(base.Base): # Drop any (credential_type) assets that are being managed by the Tower instance. if asset.json.get('managed_by_tower'): return None + if options is None: # Deprecated endpoint or insufficient permissions + return None try: # Note: doing asset[key] automatically parses json blob strings, which can be a problem. 
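As a condensed sketch of the OPTIONS gating introduced above (the endpoint argument is assumed to be an awxkit page or TentativePage): deprecated endpoints announce themselves with a "Warning: 299 ..." header, and read-only endpoints omit POST from the Allow header.

    # Sketch only; mirrors the caching-free core of _get_options above.
    def post_fields_or_none(endpoint):
        options = endpoint.options()
        warning = options.r.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            return None                                   # deprecated endpoint
        if 'POST' not in options.r.headers.get('Allow', ''):
            return None                                   # read-only endpoint
        return options.json['actions'].get('POST', {})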
@@ -88,14 +108,16 @@ class ApiV2(base.Base): if rel.__class__.__name__ in EXPORTABLE_RELATIONS: by_natural_key = True + related_options = self._get_options(related_endpoint) + if related_options is None: + continue elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: - by_natural_key = False + by_natural_key, related_options = False, None else: continue data = related_endpoint.get(all_pages=True) if 'results' in data: - related_options = self._get_options(related_endpoint) related[key] = [ x.get_natural_key() if by_natural_key else self._serialize_asset(x, related_options) for x in data.results @@ -113,6 +135,10 @@ class ApiV2(base.Base): def _get_assets(self, resource, value): endpoint = getattr(self, resource) + options = self._get_options(endpoint) + if options is None: + return None + if value: from awxkit.cli.options import pk_or_name @@ -121,7 +147,6 @@ class ApiV2(base.Base): else: results = endpoint.get(all_pages=True).results - options = self._get_options(endpoint) assets = (self._serialize_asset(asset, options) for asset in results) return [asset for asset in assets if asset is not None] @@ -160,9 +185,8 @@ class ApiV2(base.Base): def _register_existing_assets(self, resource): endpoint = getattr(self, resource) options = self._get_options(endpoint) - if getattr(self, '_options', None) is None: - self._options = {} - self._options[resource] = options + if options is None: + return results = endpoint.get(all_pages=True).results for pg in results: From ab15349c8c792863951763e35ecfe22543393cda Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 2 Apr 2020 10:49:59 -0400 Subject: [PATCH 081/494] Remove the NoNaturalKey error It's too awkward, and it makes more sense to return None instead. --- awxkit/awxkit/api/pages/page.py | 14 ++++++++------ awxkit/awxkit/api/pages/roles.py | 9 +++++++-- awxkit/awxkit/exceptions.py | 5 ----- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 9effa8ef64..8913625b53 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -319,14 +319,16 @@ class Page(object): def get_natural_key(self): if not getattr(self, 'NATURAL_KEY', None): - raise exc.NoNaturalKey( - "Page does not have a natural key: {}".format(getattr(self, 'endpoint', repr(self.__class__))) - ) + return None + natural_key = {} for key in self.NATURAL_KEY: if key in self.related: - # FIXME: use caching by url - natural_key[key] = self.related[key].get().get_natural_key() + try: + # FIXME: use caching by url + natural_key[key] = self.related[key].get().get_natural_key() + except exc.Forbidden: + return None elif key in self: natural_key[key] = self[key] if not natural_key: @@ -395,7 +397,7 @@ class PageList(object): return self.__item_class__(self.connection).create(*a, **kw) def get_natural_key(self): - raise exc.NoNaturalKey + return None class TentativePage(str): diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py index a08d670840..d93de4cad1 100644 --- a/awxkit/awxkit/api/pages/roles.py +++ b/awxkit/awxkit/api/pages/roles.py @@ -1,4 +1,6 @@ from awxkit.api.resources import resources +import awxkit.exceptions as exc + from . import base from . 
import page @@ -14,8 +16,11 @@ class Role(base.Base): if name not in ('users', 'teams') ] if related_objs: - # FIXME: use caching by url - natural_key['content_object'] = related_objs[0].get().get_natural_key() + try: + # FIXME: use caching by url + natural_key['content_object'] = related_objs[0].get().get_natural_key() + except exc.Forbidden: + return None return natural_key diff --git a/awxkit/awxkit/exceptions.py b/awxkit/awxkit/exceptions.py index 99d75c50d6..596720b59d 100644 --- a/awxkit/awxkit/exceptions.py +++ b/awxkit/awxkit/exceptions.py @@ -106,8 +106,3 @@ class IsMigrating(Common): class ImportExportError(Exception): pass - - -class NoNaturalKey(ImportExportError): - - pass From eb10a1873d550d03cc41cea8304269f64c8e2336 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 2 Apr 2020 14:16:39 -0400 Subject: [PATCH 082/494] Drop objects that cannot be read or do not have a natural key don't fail hard. --- awxkit/awxkit/api/pages/api.py | 91 +++++++++++++++++++-------------- awxkit/awxkit/api/pages/page.py | 6 +++ 2 files changed, 58 insertions(+), 39 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index a3c2144b5b..5e1a91c49c 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,4 +1,5 @@ import itertools +import logging from awxkit.api.resources import resources import awxkit.exceptions as exc @@ -7,6 +8,9 @@ from . import page from ..mixins import has_create +log = logging.getLogger(__name__) + + EXPORTABLE_RESOURCES = [ 'users', 'organizations', @@ -73,6 +77,8 @@ class ApiV2(base.Base): if 'POST' not in options.r.headers.get('Allow', ''): return self._options.setdefault(url, None) + # FIXME: if POST isn't in the actions, this is a view where we + # don't have write permissions. Try to do something anyway. return self._options.setdefault(url, options.json['actions'].get('POST', {})) # Export methods @@ -84,53 +90,60 @@ class ApiV2(base.Base): if options is None: # Deprecated endpoint or insufficient permissions return None - try: - # Note: doing asset[key] automatically parses json blob strings, which can be a problem. - fields = { - key: asset.json[key] for key in options - if key in asset.json and key not in asset.related and key != 'id' - } - fields['natural_key'] = asset.get_natural_key() + # Note: doing asset[key] automatically parses json blob strings, which can be a problem. + fields = { + key: asset.json[key] for key in options + if key in asset.json and key not in asset.related and key != 'id' + } + fields['natural_key'] = asset.get_natural_key() - fk_fields = { + for key in options: + if not key in asset.related: + continue + try: # FIXME: use caching by url - key: asset.related[key].get().get_natural_key() for key in options - if key in asset.related - } + fields[key] = asset.related[key].get().get_natural_key() + except exc.Forbidden: + log.warning("This object cannot be read: %s", asset.related[key]) + pass # FIXME: what if the fk is mandatory? 
- related = {} - for key, related_endpoint in asset.related.items(): - if key in asset.json or not related_endpoint: + related = {} + for key, related_endpoint in asset.related.items(): + if key in asset.json or not related_endpoint: + continue + if key == 'object_roles': + continue # FIXME: we should aggregate all visited roles + + rel = related_endpoint._create() + if rel.__class__.__name__ in EXPORTABLE_RELATIONS: + by_natural_key = True + related_options = self._get_options(related_endpoint) + if related_options is None: continue - if key == 'object_roles': # FIXME - continue - rel = related_endpoint._create() + elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: + by_natural_key, related_options = False, None + else: + continue - if rel.__class__.__name__ in EXPORTABLE_RELATIONS: - by_natural_key = True - related_options = self._get_options(related_endpoint) - if related_options is None: - continue - elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: - by_natural_key, related_options = False, None - else: - continue + try: + # FIXME: use caching by url + data = rel.get(all_pages=True) + except exc.Forbidden: + log.warning("This object cannot be read: %s", related_endpoint) + continue - data = related_endpoint.get(all_pages=True) - if 'results' in data: - related[key] = [ - x.get_natural_key() if by_natural_key else self._serialize_asset(x, related_options) - for x in data.results - ] - else: - related[key] = data.json - except exc.Forbidden: - return None + if 'results' in data: + results = ( + x.get_natural_key() if by_natural_key else self._serialize_asset(x, related_options) + for x in data.results + ) + related[key] = [x for x in results if x is not None] + else: + related[key] = data.json - related_fields = {'related': related} if related else {} + if related: + fields['related'] = related - fields.update(fk_fields) - fields.update(related_fields) return fields def _get_assets(self, resource, value): diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 8913625b53..dd73fc4d67 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -318,7 +318,10 @@ class Page(object): return page_cls(self.connection, endpoint=endpoint).get(**kw) def get_natural_key(self): + warn = "This object does not have a natural key: %s" + if not getattr(self, 'NATURAL_KEY', None): + log.warning(warn, getattr(self, 'endpoint', '')) return None natural_key = {} @@ -328,10 +331,12 @@ class Page(object): # FIXME: use caching by url natural_key[key] = self.related[key].get().get_natural_key() except exc.Forbidden: + log.warning("This object cannot be read: %s", getattr(self, 'endpoint', '')) return None elif key in self: natural_key[key] = self[key] if not natural_key: + log.warning(warn, getattr(self, 'endpoint', '')) return None natural_key['type'] = self['type'] @@ -397,6 +402,7 @@ class PageList(object): return self.__item_class__(self.connection).create(*a, **kw) def get_natural_key(self): + log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', '')) return None From a5fa34bd3bd3c04436e8602bfe64e49eef2bd3ce Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 2 Apr 2020 15:20:43 -0400 Subject: [PATCH 083/494] Fall back to parsing the OPTIONS description to determine the needed fields for constructing an object. 
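To make the export format from the previous patch concrete, a hypothetical record for a team (all values invented) might serialize as follows: plain POST fields are copied, foreign keys are replaced by natural keys, exportable relateds are gathered under 'related', and the object's own natural key is attached.

    exported_team = {
        'name': 'QA',
        'description': '',
        'organization': {'name': 'Default', 'type': 'organization'},
        'related': {'roles': []},   # exported role grants would be listed here
        'natural_key': {'organization': {'name': 'Default', 'type': 'organization'},
                        'name': 'QA',
                        'type': 'team'},
    }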
--- awxkit/awxkit/api/pages/api.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 5e1a91c49c..27d39968d2 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,5 +1,6 @@ import itertools import logging +import re from awxkit.api.resources import resources import awxkit.exceptions as exc @@ -7,6 +8,7 @@ from . import base from . import page from ..mixins import has_create +descRE = re.compile('^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') log = logging.getLogger(__name__) @@ -54,6 +56,17 @@ class Api(base.Base): page.register_page(resources.api, Api) +def parse_description(desc): + options = {} + for line in desc[desc.index('POST'):].splitlines(): + match = descRE.match(line) + if not match: + continue + options[match.group(1)] = {'type': match.group(2), + 'required': match.group(3) == 'required'} + return options + + class ApiV2(base.Base): # Common import/export methods @@ -77,9 +90,10 @@ class ApiV2(base.Base): if 'POST' not in options.r.headers.get('Allow', ''): return self._options.setdefault(url, None) - # FIXME: if POST isn't in the actions, this is a view where we - # don't have write permissions. Try to do something anyway. - return self._options.setdefault(url, options.json['actions'].get('POST', {})) + if 'POST' in options.json['actions']: + return self._options.setdefault(url, options.json['actions']['POST']) + else: + return self._options.setdefault(url, parse_description(options.json['description'])) # Export methods From 719f0b407c88e928b0aabcdd4094b815f5fc97ac Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 3 Apr 2020 15:03:54 -0400 Subject: [PATCH 084/494] Enable credential relations but only when it is in an attach/detach list view. --- awxkit/awxkit/api/pages/api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 27d39968d2..062bad95ed 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -31,6 +31,7 @@ EXPORTABLE_RESOURCES = [ EXPORTABLE_RELATIONS = [ 'Roles', 'NotificationTemplates', + 'Credentials', ] @@ -125,8 +126,6 @@ class ApiV2(base.Base): for key, related_endpoint in asset.related.items(): if key in asset.json or not related_endpoint: continue - if key == 'object_roles': - continue # FIXME: we should aggregate all visited roles rel = related_endpoint._create() if rel.__class__.__name__ in EXPORTABLE_RELATIONS: @@ -134,6 +133,8 @@ class ApiV2(base.Base): related_options = self._get_options(related_endpoint) if related_options is None: continue + if 'id' not in related_options: + continue # This is a read-only or create-only endpoint. 
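An illustrative run of the description-parsing fallback above, using a synthetic description string in the field-line format the regex expects (the exact wording of AWX's browsable-API help text may differ):

    description = (
        'POST requests to this resource accept the following fields:\n'
        '* `name`: Name of this inventory. (string, required)\n'
        '* `description`: Optional description. (string, default="")\n'
    )
    parse_description(description)
    # -> {'name': {'type': 'string', 'required': True},
    #     'description': {'type': 'string', 'required': False}}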
elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: by_natural_key, related_options = False, None else: From 1b264011a23f1c5d4bf2ce69999b4e9c69593e84 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 3 Apr 2020 16:21:26 -0400 Subject: [PATCH 085/494] Enable interconnections between WFJT Nodes --- awxkit/awxkit/api/pages/api.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 062bad95ed..e6561e5a9c 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -31,6 +31,7 @@ EXPORTABLE_RESOURCES = [ EXPORTABLE_RELATIONS = [ 'Roles', 'NotificationTemplates', + 'WorkflowJobTemplateNodes', 'Credentials', ] @@ -38,8 +39,10 @@ EXPORTABLE_RELATIONS = [ EXPORTABLE_DEPENDENT_OBJECTS = [ 'Labels', 'SurveySpec', - 'WorkflowJobTemplateNodes', 'Schedules', + # WFJT Nodes are a special case, we want full data for the create + # view and natural keys for the attach views. + 'WorkflowJobTemplateNodes', ] @@ -124,19 +127,19 @@ class ApiV2(base.Base): related = {} for key, related_endpoint in asset.related.items(): - if key in asset.json or not related_endpoint: + if key in options or not related_endpoint: continue rel = related_endpoint._create() - if rel.__class__.__name__ in EXPORTABLE_RELATIONS: + related_options = self._get_options(related_endpoint) + if related_options is None: # This is a read-only endpoint. + continue + is_attach = 'id' in related_options # This is not a create-only endpoint. + + if rel.__class__.__name__ in EXPORTABLE_RELATIONS and is_attach: by_natural_key = True - related_options = self._get_options(related_endpoint) - if related_options is None: - continue - if 'id' not in related_options: - continue # This is a read-only or create-only endpoint. elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: - by_natural_key, related_options = False, None + by_natural_key = False else: continue From 385725e52aa492eb1f7e23fb587b0b5e4ca1f266 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 6 Apr 2020 10:55:33 -0400 Subject: [PATCH 086/494] Fix the _create_assets method to use _get_options --- awxkit/awxkit/api/pages/api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index e6561e5a9c..c2ebe8e32b 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -8,7 +8,7 @@ from . import base from . 
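The endpoint classification implied by the two patches above can be summarized in a small sketch (helper name hypothetical): an m2m attach/detach view accepts an 'id' in POST, a create-only sub-list does not, and a view that rejects POST entirely is read-only.

    def classify(post_fields):
        # post_fields is the dict returned by _get_options, or None
        if post_fields is None:
            return 'read-only (or deprecated)'
        return 'attach/detach' if 'id' in post_fields else 'create-only'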
import page from ..mixins import has_create -descRE = re.compile('^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') +descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') log = logging.getLogger(__name__) @@ -116,7 +116,7 @@ class ApiV2(base.Base): fields['natural_key'] = asset.get_natural_key() for key in options: - if not key in asset.related: + if key not in asset.related: continue try: # FIXME: use caching by url @@ -242,7 +242,7 @@ class ApiV2(base.Base): return endpoint = getattr(self, resource) - options = self._options[resource] + options = self._get_options(endpoint) assets = data[resource] for asset in assets: post_data = {} From 53d81d42ccbe1825c00726ba8b0bfcfa839bb2f4 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 8 Apr 2020 14:18:49 -0400 Subject: [PATCH 087/494] Hook up creation and attachment of related objects --- awxkit/awxkit/api/pages/api.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index c2ebe8e32b..c1b2b29032 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -282,13 +282,33 @@ class ApiV2(base.Base): pass # admin role def _assign_related(self, page, name, related_set): - pass # FIXME + endpoint = page.related[name] + if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. survey_spec + endpoint.post(related_set) + return + + if 'natural_key' not in related_set[0]: # It is an attach set + for item in related_set: + rel_page = self._get_by_natural_key(item) + if rel_page is None: + continue # FIXME + endpoint.post({'id': rel_page['id']}) + else: # It is a create set + for item in related_set: + data = {key: value for key, value in item.items() + if key not in ('natural_key', 'related')} + endpoint.post(data) + # FIXME: deal with objects that themselves have relateds, e.g. WFJT Nodes + + # FIXME: deal with pruning existing relations that do not match the import set def _assign_related_assets(self, resource, assets): for asset in assets: page = self._get_by_natural_key(asset['natural_key']) # FIXME: deal with `page is None` case for name, S in asset.get('related', {}).items(): + if not S: + continue if name == 'roles': self._assign_roles(page, S) else: From 43b76f45759ace6b26620489724d5ff63abc84d5 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 8 Apr 2020 14:19:23 -0400 Subject: [PATCH 088/494] Deal with unreadable mandatory foreign keys by dropping the parent object. Also, clarify some of the warning log messages. --- awxkit/awxkit/api/pages/api.py | 9 +++++---- awxkit/awxkit/api/pages/page.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index c1b2b29032..f54010052d 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -113,7 +113,6 @@ class ApiV2(base.Base): key: asset.json[key] for key in options if key in asset.json and key not in asset.related and key != 'id' } - fields['natural_key'] = asset.get_natural_key() for key in options: if key not in asset.related: @@ -122,8 +121,9 @@ class ApiV2(base.Base): # FIXME: use caching by url fields[key] = asset.related[key].get().get_natural_key() except exc.Forbidden: - log.warning("This object cannot be read: %s", asset.related[key]) - pass # FIXME: what if the fk is mandatory? 
+ log.warning("This foreign key cannot be read: %s", asset.related[key]) + if options[key]['required']: + return None # This is a mandatory foreign key related = {} for key, related_endpoint in asset.related.items(): @@ -147,7 +147,7 @@ class ApiV2(base.Base): # FIXME: use caching by url data = rel.get(all_pages=True) except exc.Forbidden: - log.warning("This object cannot be read: %s", related_endpoint) + log.warning("These related objects cannot be read: %s", related_endpoint) continue if 'results' in data: @@ -162,6 +162,7 @@ class ApiV2(base.Base): if related: fields['related'] = related + fields['natural_key'] = asset.get_natural_key() return fields def _get_assets(self, resource, value): diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index dd73fc4d67..4a824ee290 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -331,7 +331,7 @@ class Page(object): # FIXME: use caching by url natural_key[key] = self.related[key].get().get_natural_key() except exc.Forbidden: - log.warning("This object cannot be read: %s", getattr(self, 'endpoint', '')) + log.warning("This foreign key cannot be read: %s", getattr(self, 'endpoint', '')) return None elif key in self: natural_key[key] = self[key] From 6958815f6e1d757972135dfe05f331c684265d46 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 8 Apr 2020 17:48:07 -0400 Subject: [PATCH 089/494] Remove the $encrypted$ placeholders from export values --- awxkit/awxkit/api/pages/api.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index f54010052d..d5314d0baf 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -71,6 +71,16 @@ def parse_description(desc): return options +def remove_encrypted(value): + if value == '$encrypted$': + return '' + if isinstance(value, list): + return [remove_encrypted(item) for item in value] + if isinstance(value, dict): + return {k: remove_encrypted(v) for k, v in value.items()} + return value + + class ApiV2(base.Base): # Common import/export methods @@ -163,7 +173,7 @@ class ApiV2(base.Base): fields['related'] = related fields['natural_key'] = asset.get_natural_key() - return fields + return remove_encrypted(fields) def _get_assets(self, resource, value): endpoint = getattr(self, resource) From 6387258da1b60cb38b12ee41e51b719dc96a6598 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 8 Apr 2020 17:48:36 -0400 Subject: [PATCH 090/494] Only wait for Project updates not other kinds of objects that have a status. 
--- awxkit/awxkit/api/pages/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index d5314d0baf..b4a786d7a6 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -239,10 +239,10 @@ class ApiV2(base.Base): if frozen_key is not None and frozen_key not in self._natural_key and fetch: pass # FIXME - from awxkit.api.mixins import has_status + from awxkit.api.pages import projects _page = self._natural_key.get(frozen_key) - if isinstance(_page, has_status.HasStatus) and not _page.is_completed: + if isinstance(_page, projects.Project) and not _page.is_completed: _page.wait_until_completed() _page = _page.get() self._natural_key[frozen_key] = _page From 471dc2babf555274f175c51fcbdabef2cec781f3 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 9 Apr 2020 15:20:59 -0400 Subject: [PATCH 091/494] Deal with relations that we cannot resolve such as due to a lack of permissions. If there is a foreign key to something where we don't have sufficient read permissions, we now drop the parent object from the export. --- awxkit/awxkit/api/pages/api.py | 14 ++++++++++---- awxkit/awxkit/api/pages/page.py | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index b4a786d7a6..ddd01803bf 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -129,11 +129,13 @@ class ApiV2(base.Base): continue try: # FIXME: use caching by url - fields[key] = asset.related[key].get().get_natural_key() + natural_key = asset.related[key].get().get_natural_key() except exc.Forbidden: log.warning("This foreign key cannot be read: %s", asset.related[key]) - if options[key]['required']: - return None # This is a mandatory foreign key + return None + if natural_key is None: + return None # This is an unresolvable foreign key + fields[key] = natural_key related = {} for key, related_endpoint in asset.related.items(): @@ -172,7 +174,11 @@ class ApiV2(base.Base): if related: fields['related'] = related - fields['natural_key'] = asset.get_natural_key() + natural_key = asset.get_natural_key() + if natural_key is None: + return None + fields['natural_key'] = natural_key + return remove_encrypted(fields) def _get_assets(self, resource, value): diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 4a824ee290..91098abe8d 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -331,7 +331,7 @@ class Page(object): # FIXME: use caching by url natural_key[key] = self.related[key].get().get_natural_key() except exc.Forbidden: - log.warning("This foreign key cannot be read: %s", getattr(self, 'endpoint', '')) + log.warning("This foreign key cannot be read: %s", self.related[key]) return None elif key in self: natural_key[key] = self[key] From 201de4e18a08c5b3c708eb86c6b7f78e266e1f2d Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 9 Apr 2020 15:22:41 -0400 Subject: [PATCH 092/494] Attempt to deal with foreign keys that get mislabeled as type integer such as through the use of DeprecatedCredentialField. 
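A sketch of the import-side field mapping this patch adjusts (helper and argument names hypothetical): a value that arrives as a natural-key dictionary is resolved back to a primary key even when the OPTIONS metadata mislabels the field as a plain integer, as DeprecatedCredentialField does.

    def map_post_field(field_meta, value, lookup_by_natural_key):
        if field_meta['type'] in ('id', 'integer') and isinstance(value, dict):
            existing = lookup_by_natural_key(value)   # natural key -> existing page
            return existing['id'] if existing is not None else None
        return value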
--- awxkit/awxkit/api/pages/api.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index ddd01803bf..da824e999d 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -266,20 +266,24 @@ class ApiV2(base.Base): for field, value in asset.items(): if field not in options: continue - if options[field]['type'] == 'id': + if options[field]['type'] in ('id', 'integer') and isinstance(value, dict): page = self._get_by_natural_key(value) post_data[field] = page['id'] if page is not None else None else: post_data[field] = value page = self._get_by_natural_key(asset['natural_key'], fetch=False) - if page is None: - if resource == 'users': - # We should only impose a default password if the resource doesn't exist. - post_data.setdefault('password', 'abc123') - page = endpoint.post(post_data) - else: - page = page.put(post_data) + try: + if page is None: + if resource == 'users': + # We should only impose a default password if the resource doesn't exist. + post_data.setdefault('password', 'abc123') + page = endpoint.post(post_data) + else: + page = page.put(post_data) + except exc.Common: + log.exception("post_data: %r", post_data) + raise self._register_page(page) From bb66e4633db7c20301abf3af83d29f083c4548cd Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 13 Apr 2020 15:31:11 -0400 Subject: [PATCH 093/494] Split _get_options into two pieces --- awxkit/awxkit/api/pages/api.py | 35 ++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index da824e999d..30907a19d2 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -89,11 +89,7 @@ class ApiV2(base.Base): if getattr(self, '_options', None) is None: self._options = {} - if isinstance(_page, page.TentativePage): - url = str(_page) - else: - url = _page.url - + url = _page.url if isinstance(_page, page.Page) else str(_page) if url in self._options: return self._options[url] @@ -101,13 +97,21 @@ class ApiV2(base.Base): warning = options.r.headers.get('Warning', '') if '299' in warning and 'deprecated' in warning: return self._options.setdefault(url, None) - if 'POST' not in options.r.headers.get('Allow', ''): - return self._options.setdefault(url, None) - if 'POST' in options.json['actions']: - return self._options.setdefault(url, options.json['actions']['POST']) + return self._options.setdefault(url, options) + + def _get_post_fields(self, _page): + options_page = self._get_options(_page) + if options_page is None: + return None + + if 'POST' not in options_page.r.headers.get('Allow', ''): + return None + + if 'POST' in options_page.json['actions']: + return options_page.json['actions']['POST'] else: - return self._options.setdefault(url, parse_description(options.json['description'])) + return parse_description(options_page.json['description']) # Export methods @@ -143,7 +147,7 @@ class ApiV2(base.Base): continue rel = related_endpoint._create() - related_options = self._get_options(related_endpoint) + related_options = self._get_post_fields(related_endpoint) if related_options is None: # This is a read-only endpoint. continue is_attach = 'id' in related_options # This is not a create-only endpoint. 
@@ -183,7 +187,7 @@ class ApiV2(base.Base): def _get_assets(self, resource, value): endpoint = getattr(self, resource) - options = self._get_options(endpoint) + options = self._get_post_fields(endpoint) if options is None: return None @@ -232,7 +236,7 @@ class ApiV2(base.Base): def _register_existing_assets(self, resource): endpoint = getattr(self, resource) - options = self._get_options(endpoint) + options = self._get_post_fields(endpoint) if options is None: return @@ -255,11 +259,14 @@ class ApiV2(base.Base): return _page def _create_assets(self, data, resource): + # FIXME: this method should work with any list-create + # endpoint, so that we can use it with create relations, e.g. WFJT Nodes + if resource not in data or resource not in EXPORTABLE_RESOURCES: return endpoint = getattr(self, resource) - options = self._get_options(endpoint) + options = self._get_post_fields(endpoint) assets = data[resource] for asset in assets: post_data = {} From 1300d38e47b8353f1b91c4516a14f164aa9e451b Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 13 Apr 2020 15:40:47 -0400 Subject: [PATCH 094/494] Introduce a new PageCache object and split out get_post_fields into its own utility function. --- awxkit/awxkit/api/pages/api.py | 67 ++++----------------------------- awxkit/awxkit/api/pages/page.py | 21 +++++++++++ awxkit/awxkit/api/utils.py | 39 +++++++++++++++++++ 3 files changed, 68 insertions(+), 59 deletions(-) create mode 100644 awxkit/awxkit/api/utils.py diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 30907a19d2..887420b8d7 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,15 +1,13 @@ import itertools import logging -import re from awxkit.api.resources import resources import awxkit.exceptions as exc from . import base from . import page +from .. 
import utils from ..mixins import has_create -descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') - log = logging.getLogger(__name__) @@ -60,59 +58,8 @@ class Api(base.Base): page.register_page(resources.api, Api) -def parse_description(desc): - options = {} - for line in desc[desc.index('POST'):].splitlines(): - match = descRE.match(line) - if not match: - continue - options[match.group(1)] = {'type': match.group(2), - 'required': match.group(3) == 'required'} - return options - - -def remove_encrypted(value): - if value == '$encrypted$': - return '' - if isinstance(value, list): - return [remove_encrypted(item) for item in value] - if isinstance(value, dict): - return {k: remove_encrypted(v) for k, v in value.items()} - return value - - class ApiV2(base.Base): - # Common import/export methods - - def _get_options(self, _page): - if getattr(self, '_options', None) is None: - self._options = {} - - url = _page.url if isinstance(_page, page.Page) else str(_page) - if url in self._options: - return self._options[url] - - options = _page.options() - warning = options.r.headers.get('Warning', '') - if '299' in warning and 'deprecated' in warning: - return self._options.setdefault(url, None) - - return self._options.setdefault(url, options) - - def _get_post_fields(self, _page): - options_page = self._get_options(_page) - if options_page is None: - return None - - if 'POST' not in options_page.r.headers.get('Allow', ''): - return None - - if 'POST' in options_page.json['actions']: - return options_page.json['actions']['POST'] - else: - return parse_description(options_page.json['description']) - # Export methods def _serialize_asset(self, asset, options): @@ -147,7 +94,7 @@ class ApiV2(base.Base): continue rel = related_endpoint._create() - related_options = self._get_post_fields(related_endpoint) + related_options = utils.get_post_fields(related_endpoint, self._cache) if related_options is None: # This is a read-only endpoint. continue is_attach = 'id' in related_options # This is not a create-only endpoint. @@ -183,11 +130,11 @@ class ApiV2(base.Base): return None fields['natural_key'] = natural_key - return remove_encrypted(fields) + return utils.remove_encrypted(fields) def _get_assets(self, resource, value): endpoint = getattr(self, resource) - options = self._get_post_fields(endpoint) + options = utils.get_post_fields(endpoint, self._cache) if options is None: return None @@ -203,6 +150,8 @@ class ApiV2(base.Base): return [asset for asset in assets if asset is not None] def export_assets(self, **kwargs): + self._cache = page.PageCache() + # If no resource kwargs are explicitly used, export everything. 
all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES) @@ -236,7 +185,7 @@ class ApiV2(base.Base): def _register_existing_assets(self, resource): endpoint = getattr(self, resource) - options = self._get_post_fields(endpoint) + options = utils.get_post_fields(endpoint, self._cache) if options is None: return @@ -266,7 +215,7 @@ class ApiV2(base.Base): return endpoint = getattr(self, resource) - options = self._get_post_fields(endpoint) + options = utils.get_post_fields(endpoint, self._cache) assets = data[resource] for asset in assets: post_data = {} diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 91098abe8d..94943ec6b6 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -531,3 +531,24 @@ class TentativePage(str): def __ne__(self, other): return self.endpoint != other + + +class PageCache(object): + def __init__(self): + self.options = {} + + def get_options(self, page): + url = page.url if isinstance(page, Page) else str(page) + if url in self.options: + return self.options[url] + + try: + options = page.options() + except exc.Common: + return self.options.setdefault(url, None) + + warning = options.r.headers.get('Warning', '') + if '299' in warning and 'deprecated' in warning: + return self.options.setdefault(url, None) + + return self.options.setdefault(url, options) diff --git a/awxkit/awxkit/api/utils.py b/awxkit/awxkit/api/utils.py new file mode 100644 index 0000000000..1e295c1eb3 --- /dev/null +++ b/awxkit/awxkit/api/utils.py @@ -0,0 +1,39 @@ +import re + + +descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') + + +def parse_description(desc): + options = {} + for line in desc[desc.index('POST'):].splitlines(): + match = descRE.match(line) + if not match: + continue + options[match.group(1)] = {'type': match.group(2), + 'required': match.group(3) == 'required'} + return options + + +def remove_encrypted(value): + if value == '$encrypted$': + return '' + if isinstance(value, list): + return [remove_encrypted(item) for item in value] + if isinstance(value, dict): + return {k: remove_encrypted(v) for k, v in value.items()} + return value + + +def get_post_fields(page, cache): + options_page = cache.get_options(page) + if options_page is None: + return None + + if 'POST' not in options_page.r.headers.get('Allow', ''): + return None + + if 'POST' in options_page.json['actions']: + return options_page.json['actions']['POST'] + else: + return parse_description(options_page.json['description']) From 14b5f63bd824785a9afeac4588057ae27b51e179 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 13 Apr 2020 17:27:29 -0400 Subject: [PATCH 095/494] Use the new PageCache to store and reuse Page.get results --- awxkit/awxkit/api/pages/api.py | 33 +++++++++++------------ awxkit/awxkit/api/pages/page.py | 45 ++++++++++++++++++++++++++------ awxkit/awxkit/api/pages/roles.py | 14 +++++----- 3 files changed, 60 insertions(+), 32 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 887420b8d7..c7e90c4f28 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -78,14 +78,13 @@ class ApiV2(base.Base): for key in options: if key not in asset.related: continue - try: - # FIXME: use caching by url - natural_key = asset.related[key].get().get_natural_key() - except exc.Forbidden: - log.warning("This foreign key cannot be read: %s", asset.related[key]) - return None + + related_endpoint = self._cache.get_page(asset.related[key]) 
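A hedged usage sketch of the new utility module and cache (assuming `v2` is an authenticated ApiV2 page): OPTIONS responses are memoized by URL, so asking for the POST fields of the same endpoint twice costs only one request.

    from awxkit.api import utils
    from awxkit.api.pages.page import PageCache

    cache = PageCache()
    fields = utils.get_post_fields(v2.job_templates, cache)   # issues OPTIONS
    fields = utils.get_post_fields(v2.job_templates, cache)   # served from cache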
+ if related_endpoint is None: + return None # This foreign key is unreadable + natural_key = related_endpoint.get_natural_key(self._cache) if natural_key is None: - return None # This is an unresolvable foreign key + return None # This foreign key has unresolvable dependencies fields[key] = natural_key related = {} @@ -106,16 +105,13 @@ class ApiV2(base.Base): else: continue - try: - # FIXME: use caching by url - data = rel.get(all_pages=True) - except exc.Forbidden: - log.warning("These related objects cannot be read: %s", related_endpoint) + data = self._cache.get_page(related_endpoint) + if data is None: continue if 'results' in data: results = ( - x.get_natural_key() if by_natural_key else self._serialize_asset(x, related_options) + x.get_natural_key(self._cache) if by_natural_key else self._serialize_asset(x, related_options) for x in data.results ) related[key] = [x for x in results if x is not None] @@ -125,7 +121,7 @@ class ApiV2(base.Base): if related: fields['related'] = related - natural_key = asset.get_natural_key() + natural_key = asset.get_natural_key(self._cache) if natural_key is None: return None fields['natural_key'] = natural_key @@ -144,7 +140,7 @@ class ApiV2(base.Base): pk = pk_or_name(self, resource, value) # TODO: decide whether to support multiple results = endpoint.get(id=pk).results else: - results = endpoint.get(all_pages=True).results + results = self._cache.get_page(endpoint).results assets = (self._serialize_asset(asset, options) for asset in results) return [asset for asset in assets if asset is not None] @@ -174,7 +170,7 @@ class ApiV2(base.Base): yield page_resource[page_cls] def _register_page(self, page): - natural_key = freeze(page.get_natural_key()) + natural_key = freeze(page.get_natural_key(self._cache)) # FIXME: we need to keep a reference for the case where we # don't have a natural key, so we can delete if natural_key is not None: @@ -189,7 +185,7 @@ class ApiV2(base.Base): if options is None: return - results = endpoint.get(all_pages=True).results + results = self._cache.get_page(endpoint).results for pg in results: self._register_page(pg) @@ -237,6 +233,7 @@ class ApiV2(base.Base): page = endpoint.post(post_data) else: page = page.put(post_data) + # FIXME: created pages need to be put in the cache except exc.Common: log.exception("post_data: %r", post_data) raise @@ -274,7 +271,7 @@ class ApiV2(base.Base): for item in related_set: data = {key: value for key, value in item.items() if key not in ('natural_key', 'related')} - endpoint.post(data) + endpoint.post(data) # FIXME: add the page to the cache # FIXME: deal with objects that themselves have relateds, e.g. 
WFJT Nodes # FIXME: deal with pruning existing relations that do not match the import set diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 94943ec6b6..b4f0eaf0b9 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -317,9 +317,12 @@ class Page(object): page_cls = get_registered_page(endpoint) return page_cls(self.connection, endpoint=endpoint).get(**kw) - def get_natural_key(self): + def get_natural_key(self, cache=None): warn = "This object does not have a natural key: %s" + if cache is None: + cache = PageCache() + if not getattr(self, 'NATURAL_KEY', None): log.warning(warn, getattr(self, 'endpoint', '')) return None @@ -327,12 +330,10 @@ class Page(object): natural_key = {} for key in self.NATURAL_KEY: if key in self.related: - try: - # FIXME: use caching by url - natural_key[key] = self.related[key].get().get_natural_key() - except exc.Forbidden: - log.warning("This foreign key cannot be read: %s", self.related[key]) + related_endpoint = cache.get_page(self.related[key]) + if related_endpoint is None: return None + natural_key[key] = related_endpoint.get_natural_key(cache=cache) elif key in self: natural_key[key] = self[key] if not natural_key: @@ -401,7 +402,7 @@ class PageList(object): def create(self, *a, **kw): return self.__item_class__(self.connection).create(*a, **kw) - def get_natural_key(self): + def get_natural_key(self, cache=None): log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', '')) return None @@ -536,19 +537,47 @@ class TentativePage(str): class PageCache(object): def __init__(self): self.options = {} + self.pages_by_url = {} def get_options(self, page): - url = page.url if isinstance(page, Page) else str(page) + url = page.endpoint if isinstance(page, Page) else str(page) if url in self.options: return self.options[url] try: options = page.options() except exc.Common: + log.error("This endpoint raised an error: %s", url) return self.options.setdefault(url, None) warning = options.r.headers.get('Warning', '') if '299' in warning and 'deprecated' in warning: + log.warning("This endpoint is deprecated: %s", url) return self.options.setdefault(url, None) return self.options.setdefault(url, options) + + def set_page(self, page): + self.pages_by_url[page.endpoint] = page + if 'results' in page: + for p in page.results: + self.set_page(p) + return page + + def get_page(self, page): + url = page.endpoint if isinstance(page, Page) else str(page) + if url in self.pages_by_url: + return self.pages_by_url[url] + + try: + page = page.get(all_pages=True) + except exc.Common: + log.error("This endpoint raised an error: %s", url) + return self.pages_by_url.setdefault(url, None) + + warning = page.r.headers.get('Warning', '') + if '299' in warning and 'deprecated' in warning: + log.warning("This endpoint is deprecated: %s", url) + return self.pages_by_url.setdefault(url, None) + + return self.set_page(page) diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py index d93de4cad1..530dfc36e9 100644 --- a/awxkit/awxkit/api/pages/roles.py +++ b/awxkit/awxkit/api/pages/roles.py @@ -9,18 +9,20 @@ class Role(base.Base): NATURAL_KEY = ('name',) - def get_natural_key(self): - natural_key = super(Role, self).get_natural_key() + def get_natural_key(self, cache=None): + if cache is None: + cache = page.PageCache() + + natural_key = super(Role, self).get_natural_key(cache=cache) related_objs = [ related for name, related in self.related.items() if name not in ('users', 
'teams') ] if related_objs: - try: - # FIXME: use caching by url - natural_key['content_object'] = related_objs[0].get().get_natural_key() - except exc.Forbidden: + related_endpoint = cache.get_page(related_objs[0]) + if related_endpoint is None: return None + natural_key['content_object'] = related_endpoint.get_natural_key(cache=cache) return natural_key From f7825aefebaa6bebc965d3dfdb18ca1d677a542f Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 13 Apr 2020 17:57:07 -0400 Subject: [PATCH 096/494] Avoid doing an OPTIONS call unless we know it is a related type we export --- awxkit/awxkit/api/pages/api.py | 9 +++++++-- awxkit/awxkit/api/pages/roles.py | 1 - 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index c7e90c4f28..89751b734d 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -93,14 +93,19 @@ class ApiV2(base.Base): continue rel = related_endpoint._create() + is_relation = rel.__class__.__name__ in EXPORTABLE_RELATIONS + is_dependent = rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS + if not (is_relation or is_dependent): + continue + related_options = utils.get_post_fields(related_endpoint, self._cache) if related_options is None: # This is a read-only endpoint. continue is_attach = 'id' in related_options # This is not a create-only endpoint. - if rel.__class__.__name__ in EXPORTABLE_RELATIONS and is_attach: + if is_relation and is_attach: by_natural_key = True - elif rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS: + elif is_dependent: by_natural_key = False else: continue diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py index 530dfc36e9..104af818b1 100644 --- a/awxkit/awxkit/api/pages/roles.py +++ b/awxkit/awxkit/api/pages/roles.py @@ -1,5 +1,4 @@ from awxkit.api.resources import resources -import awxkit.exceptions as exc from . import base from . import page From 9fc1a4bb44a6f52aecbeac6d9234f8be3d47d3a4 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 15 Apr 2020 14:29:29 -0400 Subject: [PATCH 097/494] Change the method that exports list views to take a PageList or TentativePage This will allow the related m2m views to also use this method, with a bit of effort. Also, remove the use of pk_or_name in favor of a new method that reduces the number of api calls. 
--- awxkit/awxkit/api/pages/api.py | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 89751b734d..fd8362fb16 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -133,23 +133,27 @@ class ApiV2(base.Base): return utils.remove_encrypted(fields) - def _get_assets(self, resource, value): - endpoint = getattr(self, resource) - options = utils.get_post_fields(endpoint, self._cache) - if options is None: + def _export_list(self, endpoint): + post_fields = utils.get_post_fields(endpoint, self._cache) + if post_fields is None: return None - if value: - from awxkit.cli.options import pk_or_name + if isinstance(endpoint, page.TentativePage): + endpoint = self._cache.get_page(endpoint) + if endpoint is None: + return None - pk = pk_or_name(self, resource, value) # TODO: decide whether to support multiple - results = endpoint.get(id=pk).results - else: - results = self._cache.get_page(endpoint).results - - assets = (self._serialize_asset(asset, options) for asset in results) + assets = (self._serialize_asset(asset, post_fields) for asset in endpoint.results) return [asset for asset in assets if asset is not None] + def _filtered_list(self, endpoint, value): + if isinstance(value, int) or value.isdecimal(): + return endpoint.get(id=int(value)) + options = self._cache.get_options(endpoint) + identifier = next(field for field in options['search_fields'] + if field in ('name', 'username', 'hostname')) + return endpoint.get(**{identifier: value}) + def export_assets(self, **kwargs): self._cache = page.PageCache() @@ -160,7 +164,10 @@ class ApiV2(base.Base): for resource in EXPORTABLE_RESOURCES: value = kwargs.get(resource) if all_resources or value is not None: - data[resource] = self._get_assets(resource, value) + endpoint = getattr(self, resource) + if value: + endpoint = self._filtered_list(endpoint, value) + data[resource] = self._export_list(endpoint) return data From 301f15bfdd92e4f2fc6120ae18e74a876270ea91 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 22 Apr 2020 14:32:58 -0400 Subject: [PATCH 098/494] Rename some things for consistency --- awxkit/awxkit/api/pages/api.py | 84 +++++++++++++++++----------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index fd8362fb16..081f5b15e3 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -62,46 +62,46 @@ class ApiV2(base.Base): # Export methods - def _serialize_asset(self, asset, options): + def _export(self, _page, post_fields): # Drop any (credential_type) assets that are being managed by the Tower instance. - if asset.json.get('managed_by_tower'): + if _page.json.get('managed_by_tower'): return None - if options is None: # Deprecated endpoint or insufficient permissions + if post_fields is None: # Deprecated endpoint or insufficient permissions return None - # Note: doing asset[key] automatically parses json blob strings, which can be a problem. + # Note: doing _page[key] automatically parses json blob strings, which can be a problem. 
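Restated as a standalone sketch, the selector logic introduced above filters by primary key when the value is numeric and otherwise by the endpoint's human identifier, picked from the search_fields advertised in its OPTIONS response:

    def select(endpoint, value, search_fields):
        if isinstance(value, int) or value.isdecimal():
            return endpoint.get(id=int(value))
        identifier = next(f for f in search_fields
                          if f in ('name', 'username', 'hostname'))
        return endpoint.get(**{identifier: value})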
fields = { - key: asset.json[key] for key in options - if key in asset.json and key not in asset.related and key != 'id' + key: _page.json[key] for key in post_fields + if key in _page.json and key not in _page.related and key != 'id' } - for key in options: - if key not in asset.related: + for key in post_fields: + if key not in _page.related: continue - related_endpoint = self._cache.get_page(asset.related[key]) - if related_endpoint is None: + rel_endpoint = self._cache.get_page(_page.related[key]) + if rel_endpoint is None: return None # This foreign key is unreadable - natural_key = related_endpoint.get_natural_key(self._cache) + natural_key = rel_endpoint.get_natural_key(self._cache) if natural_key is None: return None # This foreign key has unresolvable dependencies fields[key] = natural_key related = {} - for key, related_endpoint in asset.related.items(): - if key in options or not related_endpoint: + for key, rel_endpoint in _page.related.items(): + if key in post_fields or not rel_endpoint: continue - rel = related_endpoint._create() + rel = rel_endpoint._create() is_relation = rel.__class__.__name__ in EXPORTABLE_RELATIONS is_dependent = rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS if not (is_relation or is_dependent): continue - related_options = utils.get_post_fields(related_endpoint, self._cache) - if related_options is None: # This is a read-only endpoint. + rel_post_fields = utils.get_post_fields(rel_endpoint, self._cache) + if rel_post_fields is None: # This is a read-only endpoint. continue - is_attach = 'id' in related_options # This is not a create-only endpoint. + is_attach = 'id' in rel_post_fields # This is not a create-only endpoint. if is_relation and is_attach: by_natural_key = True @@ -110,23 +110,23 @@ class ApiV2(base.Base): else: continue - data = self._cache.get_page(related_endpoint) - if data is None: + rel_page = self._cache.get_page(rel_endpoint) + if rel_page is None: continue - if 'results' in data: + if 'results' in rel_page: results = ( - x.get_natural_key(self._cache) if by_natural_key else self._serialize_asset(x, related_options) - for x in data.results + x.get_natural_key(self._cache) if by_natural_key else self._export(x, rel_post_fields) + for x in rel_page.results ) related[key] = [x for x in results if x is not None] else: - related[key] = data.json + related[key] = rel_page.json if related: fields['related'] = related - natural_key = asset.get_natural_key(self._cache) + natural_key = _page.get_natural_key(self._cache) if natural_key is None: return None fields['natural_key'] = natural_key @@ -143,7 +143,7 @@ class ApiV2(base.Base): if endpoint is None: return None - assets = (self._serialize_asset(asset, post_fields) for asset in endpoint.results) + assets = (self._export(asset, post_fields) for asset in endpoint.results) return [asset for asset in assets if asset is not None] def _filtered_list(self, endpoint, value): @@ -206,13 +206,7 @@ class ApiV2(base.Base): if frozen_key is not None and frozen_key not in self._natural_key and fetch: pass # FIXME - from awxkit.api.pages import projects - _page = self._natural_key.get(frozen_key) - if isinstance(_page, projects.Project) and not _page.is_completed: - _page.wait_until_completed() - _page = _page.get() - self._natural_key[frozen_key] = _page return _page def _create_assets(self, data, resource): @@ -231,26 +225,27 @@ class ApiV2(base.Base): if field not in options: continue if options[field]['type'] in ('id', 'integer') and isinstance(value, dict): - page = 
self._get_by_natural_key(value) - post_data[field] = page['id'] if page is not None else None + _page = self._get_by_natural_key(value) + post_data[field] = _page['id'] if _page is not None else None else: post_data[field] = value - page = self._get_by_natural_key(asset['natural_key'], fetch=False) + _page = self._get_by_natural_key(asset['natural_key'], fetch=False) try: - if page is None: + if _page is None: if resource == 'users': # We should only impose a default password if the resource doesn't exist. post_data.setdefault('password', 'abc123') - page = endpoint.post(post_data) + _page = endpoint.post(post_data) else: - page = page.put(post_data) + _page = _page.put(post_data) # FIXME: created pages need to be put in the cache - except exc.Common: - log.exception("post_data: %r", post_data) - raise + except exc.Common as e: + log.error("Object import failed: %s.", e) + log.debug("post_data: %r", post_data) + continue - self._register_page(page) + self._register_page(_page) def _assign_roles(self, page, roles): role_endpoint = page.json['related']['roles'] @@ -278,7 +273,10 @@ class ApiV2(base.Base): rel_page = self._get_by_natural_key(item) if rel_page is None: continue # FIXME - endpoint.post({'id': rel_page['id']}) + try: + endpoint.post({'id': rel_page['id']}) + except exc.NoContent: # desired exception on successful (dis)association + pass else: # It is a create set for item in related_set: data = {key: value for key, value in item.items() @@ -301,6 +299,8 @@ class ApiV2(base.Base): self._assign_related(page, name, S) def import_assets(self, data): + self._cache = page.PageCache() + for resource in self._dependent_resources(data): self._register_existing_assets(resource) self._create_assets(data, resource) From 66bc947adbc5090df057e51e76f8c1b12e5c8c14 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 22 Apr 2020 16:46:03 -0400 Subject: [PATCH 099/494] Reuse _import_list for import of related full objects e.g. WFJT Nodes. Also rename _import_list from _create_assets. --- awxkit/awxkit/api/pages/api.py | 70 +++++++++++++++++----------------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 081f5b15e3..72d1fa4bf5 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -191,15 +191,14 @@ class ApiV2(base.Base): self._natural_key[natural_key] = page - def _register_existing_assets(self, resource): - endpoint = getattr(self, resource) - options = utils.get_post_fields(endpoint, self._cache) - if options is None: + def _register_existing_assets(self, endpoint): + post_fields = utils.get_post_fields(endpoint, self._cache) + if post_fields is None: return results = self._cache.get_page(endpoint).results - for pg in results: - self._register_page(pg) + for _page in results: + self._register_page(_page) def _get_by_natural_key(self, key, fetch=True): frozen_key = freeze(key) @@ -209,22 +208,14 @@ class ApiV2(base.Base): _page = self._natural_key.get(frozen_key) return _page - def _create_assets(self, data, resource): - # FIXME: this method should work with any list-create - # endpoint, so that we can use it with create relations, e.g. 
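An aside on the association idiom used above (the job template and credential pages are assumed to already exist): AWX answers a successful attach or detach POST with HTTP 204 No Content, which awxkit surfaces as exc.NoContent, so the exception is actually the success path.

    import awxkit.exceptions as exc

    try:
        job_template.related['credentials'].post({'id': credential['id']})
    except exc.NoContent:
        pass  # association succeeded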
WFJT Nodes - - if resource not in data or resource not in EXPORTABLE_RESOURCES: - return - - endpoint = getattr(self, resource) - options = utils.get_post_fields(endpoint, self._cache) - assets = data[resource] + def _import_list(self, endpoint, assets): + post_fields = utils.get_post_fields(endpoint, self._cache) for asset in assets: post_data = {} for field, value in asset.items(): - if field not in options: + if field not in post_fields: continue - if options[field]['type'] in ('id', 'integer') and isinstance(value, dict): + if post_fields[field]['type'] in ('id', 'integer') and isinstance(value, dict): _page = self._get_by_natural_key(value) post_data[field] = _page['id'] if _page is not None else None else: @@ -233,7 +224,7 @@ class ApiV2(base.Base): _page = self._get_by_natural_key(asset['natural_key'], fetch=False) try: if _page is None: - if resource == 'users': + if asset['natural_key']['type'] == 'user': # We should only impose a default password if the resource doesn't exist. post_data.setdefault('password', 'abc123') _page = endpoint.post(post_data) @@ -262,52 +253,59 @@ class ApiV2(base.Base): else: pass # admin role - def _assign_related(self, page, name, related_set): - endpoint = page.related[name] + def _assign_related(self, _page, name, related_set): + endpoint = _page.related[name] if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. survey_spec endpoint.post(related_set) return if 'natural_key' not in related_set[0]: # It is an attach set + # Try to impedance match + related = endpoint.get(all_pages=True) + existing = {rel['id'] for rel in related.results} for item in related_set: rel_page = self._get_by_natural_key(item) if rel_page is None: continue # FIXME + if rel_page['id'] in existing: + continue try: - endpoint.post({'id': rel_page['id']}) + post_data = {'id': rel_page['id']} + endpoint.post(post_data) except exc.NoContent: # desired exception on successful (dis)association pass + except exc.Common as e: + log.error("Object association failed: %s.", e) + log.debug("post_data: %r", post_data) + raise else: # It is a create set - for item in related_set: - data = {key: value for key, value in item.items() - if key not in ('natural_key', 'related')} - endpoint.post(data) # FIXME: add the page to the cache - # FIXME: deal with objects that themselves have relateds, e.g. WFJT Nodes + self._import_list(endpoint, related_set) # FIXME: deal with pruning existing relations that do not match the import set - def _assign_related_assets(self, resource, assets): + def _assign_related_assets(self, assets): for asset in assets: - page = self._get_by_natural_key(asset['natural_key']) - # FIXME: deal with `page is None` case + _page = self._get_by_natural_key(asset['natural_key']) + # FIXME: deal with `_page is None` case for name, S in asset.get('related', {}).items(): if not S: continue if name == 'roles': - self._assign_roles(page, S) + self._assign_roles(_page, S) else: - self._assign_related(page, name, S) + self._assign_related(_page, name, S) def import_assets(self, data): self._cache = page.PageCache() for resource in self._dependent_resources(data): - self._register_existing_assets(resource) - self._create_assets(data, resource) + endpoint = getattr(self, resource) + self._register_existing_assets(endpoint) + self._import_list(endpoint, data.get(resource) or []) # FIXME: should we delete existing unpatched assets? 
- for resource, assets in data.items(): - self._assign_related_assets(resource, assets) + for assets in data.values(): + self._assign_related_assets(assets) page.register_page(resources.v2, ApiV2) From 57aff6394c93f6dd8c7b242ca8ec0024f53f2635 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 23 Apr 2020 11:11:35 -0400 Subject: [PATCH 100/494] Log an error and continue when a related object is not found --- awxkit/awxkit/api/pages/api.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 72d1fa4bf5..4c70ed144d 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -286,7 +286,9 @@ class ApiV2(base.Base): def _assign_related_assets(self, assets): for asset in assets: _page = self._get_by_natural_key(asset['natural_key']) - # FIXME: deal with `_page is None` case + if _page is None: + log.error("Related object with natural key not found: %r", asset['natural_key']) + continue for name, S in asset.get('related', {}).items(): if not S: continue From 86afa5cf424646cf9d0ab7ed445baecf199b7c9e Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 23 Apr 2020 13:23:49 -0400 Subject: [PATCH 101/494] Make more use of the PageCache for imports --- awxkit/awxkit/api/pages/api.py | 49 ++++++--------------------------- awxkit/awxkit/api/pages/page.py | 13 +++++++++ awxkit/awxkit/api/utils.py | 6 ++++ 3 files changed, 27 insertions(+), 41 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 4c70ed144d..75dbe26bce 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -44,12 +44,6 @@ EXPORTABLE_DEPENDENT_OBJECTS = [ ] -def freeze(key): - if key is None: - return None - return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) - - class Api(base.Base): pass @@ -181,33 +175,6 @@ class ApiV2(base.Base): for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)): yield page_resource[page_cls] - def _register_page(self, page): - natural_key = freeze(page.get_natural_key(self._cache)) - # FIXME: we need to keep a reference for the case where we - # don't have a natural key, so we can delete - if natural_key is not None: - if getattr(self, '_natural_key', None) is None: - self._natural_key = {} - - self._natural_key[natural_key] = page - - def _register_existing_assets(self, endpoint): - post_fields = utils.get_post_fields(endpoint, self._cache) - if post_fields is None: - return - - results = self._cache.get_page(endpoint).results - for _page in results: - self._register_page(_page) - - def _get_by_natural_key(self, key, fetch=True): - frozen_key = freeze(key) - if frozen_key is not None and frozen_key not in self._natural_key and fetch: - pass # FIXME - - _page = self._natural_key.get(frozen_key) - return _page - def _import_list(self, endpoint, assets): post_fields = utils.get_post_fields(endpoint, self._cache) for asset in assets: @@ -216,12 +183,12 @@ class ApiV2(base.Base): if field not in post_fields: continue if post_fields[field]['type'] in ('id', 'integer') and isinstance(value, dict): - _page = self._get_by_natural_key(value) + _page = self._cache.get_by_natural_key(value) post_data[field] = _page['id'] if _page is not None else None else: post_data[field] = value - _page = self._get_by_natural_key(asset['natural_key'], fetch=False) + _page = self._cache.get_by_natural_key(asset['natural_key']) try: if _page is None: if asset['natural_key']['type'] == 'user': @@ 
-230,20 +197,19 @@ class ApiV2(base.Base): _page = endpoint.post(post_data) else: _page = _page.put(post_data) - # FIXME: created pages need to be put in the cache except exc.Common as e: log.error("Object import failed: %s.", e) log.debug("post_data: %r", post_data) continue - self._register_page(_page) + self._cache.set_page(_page) def _assign_roles(self, page, roles): role_endpoint = page.json['related']['roles'] for role in roles: if 'content_object' not in role: continue # admin role - obj_page = self._get_by_natural_key(role['content_object']) + obj_page = self._cache.get_by_natural_key(role['content_object']) if obj_page is not None: role_page = obj_page.get_object_role(role['name'], by_name=True) try: @@ -264,7 +230,7 @@ class ApiV2(base.Base): related = endpoint.get(all_pages=True) existing = {rel['id'] for rel in related.results} for item in related_set: - rel_page = self._get_by_natural_key(item) + rel_page = self._cache.get_by_natural_key(item) if rel_page is None: continue # FIXME if rel_page['id'] in existing: @@ -285,7 +251,7 @@ class ApiV2(base.Base): def _assign_related_assets(self, assets): for asset in assets: - _page = self._get_by_natural_key(asset['natural_key']) + _page = self._cache.get_by_natural_key(asset['natural_key']) if _page is None: log.error("Related object with natural key not found: %r", asset['natural_key']) continue @@ -302,7 +268,8 @@ class ApiV2(base.Base): for resource in self._dependent_resources(data): endpoint = getattr(self, resource) - self._register_existing_assets(endpoint) + # Load up existing objects, so that we can try to update or link to them + self._cache.get_page(endpoint) self._import_list(endpoint, data.get(resource) or []) # FIXME: should we delete existing unpatched assets? diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index b4f0eaf0b9..814297a4eb 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -15,6 +15,7 @@ from awxkit.utils import ( is_list_or_tuple, to_str ) +from awxkit.api import utils from awxkit.api.client import Connection from awxkit.api.registry import URLRegistry from awxkit.config import config @@ -360,6 +361,8 @@ def exception_from_status_code(status_code): class PageList(object): + NATURAL_KEY = None + @property def __item_class__(self): """Returns the class representing a single 'Page' item @@ -538,6 +541,7 @@ class PageCache(object): def __init__(self): self.options = {} self.pages_by_url = {} + self.pages_by_natural_key = {} def get_options(self, page): url = page.endpoint if isinstance(page, Page) else str(page) @@ -559,6 +563,10 @@ class PageCache(object): def set_page(self, page): self.pages_by_url[page.endpoint] = page + if getattr(page, 'NATURAL_KEY', None): + natural_key = page.get_natural_key(cache=self) + if natural_key is not None: + self.pages_by_natural_key[utils.freeze(natural_key)] = page.endpoint if 'results' in page: for p in page.results: self.set_page(p) @@ -581,3 +589,8 @@ class PageCache(object): return self.pages_by_url.setdefault(url, None) return self.set_page(page) + + def get_by_natural_key(self, natural_key): + endpoint = self.pages_by_natural_key.get(utils.freeze(natural_key)) + if endpoint: + return self.get_page(endpoint) diff --git a/awxkit/awxkit/api/utils.py b/awxkit/awxkit/api/utils.py index 1e295c1eb3..355cdb0158 100644 --- a/awxkit/awxkit/api/utils.py +++ b/awxkit/awxkit/api/utils.py @@ -4,6 +4,12 @@ import re descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') +def freeze(key): + if key is None: + 
return None + return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items()) + + def parse_description(desc): options = {} for line in desc[desc.index('POST'):].splitlines(): From a531b85b31f71df1538e32a20ec8236eca3cb67d Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 23 Apr 2020 15:32:14 -0400 Subject: [PATCH 102/494] Redo waiting until Project updates are complete --- awxkit/awxkit/api/pages/api.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 75dbe26bce..218bc81658 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -195,9 +195,14 @@ class ApiV2(base.Base): # We should only impose a default password if the resource doesn't exist. post_data.setdefault('password', 'abc123') _page = endpoint.post(post_data) + if asset['natural_key']['type'] == 'project': + # When creating a project, we need to wait for its + # first project update to finish so that associated + # JTs have valid options for playbook names + _page.wait_until_completed() else: _page = _page.put(post_data) - except exc.Common as e: + except (exc.Common, AssertionError) as e: log.error("Object import failed: %s.", e) log.debug("post_data: %r", post_data) continue From 76fb605dcdb33438942a4acc67e982bf96af723b Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 28 Apr 2020 13:50:12 -0400 Subject: [PATCH 103/494] Deal with exports involving foreign keys where you don't have permission --- awxkit/awxkit/api/pages/api.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 218bc81658..28e9393462 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -61,6 +61,7 @@ class ApiV2(base.Base): if _page.json.get('managed_by_tower'): return None if post_fields is None: # Deprecated endpoint or insufficient permissions + log.error("Object export failed: %s", _page.endpoint) return None # Note: doing _page[key] automatically parses json blob strings, which can be a problem. @@ -74,8 +75,12 @@ class ApiV2(base.Base): continue rel_endpoint = self._cache.get_page(_page.related[key]) - if rel_endpoint is None: - return None # This foreign key is unreadable + if rel_endpoint is None: # This foreign key is unreadable + if post_fields[key].get('required'): + log.error("Foreign key export failed: %s", _page.related[key]) + return None + log.error("Foreign key export failed, setting to null: %s", _page.related[key]) + continue natural_key = rel_endpoint.get_natural_key(self._cache) if natural_key is None: return None # This foreign key has unresolvable dependencies From c8288af87fb5734aaefc59643cb711c5a2f1d6bb Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 30 Apr 2020 17:06:48 -0400 Subject: [PATCH 104/494] Make sure we have a cached version of existing objects even if it isn't an object being directly imported. We might need it for relations of things that are being imported. 
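For illustration, a minimal sketch of the case this protects against; the resource and field names here are only an approximation of the export format, not taken from a real export:

    # Hypothetical import data: the inventory points at an existing organization
    # by natural key, but no organizations are included in the file itself.
    data = {
        'inventories': [
            {
                'natural_key': {'name': 'Demo Inventory', 'type': 'inventory'},
                'organization': {'name': 'Existing Org', 'type': 'organization'},
            },
        ],
    }

Turning that 'organization' value into an id goes through the natural-key cache, so the organizations endpoint has to be paged in even though 'organizations' never appears as a key in the data being imported. Walking EXPORTABLE_RESOURCES instead of only the keys of the import data keeps those pre-existing objects reachable.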
--- awxkit/awxkit/api/pages/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 28e9393462..522b749c82 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -175,7 +175,7 @@ class ApiV2(base.Base): def _dependent_resources(self, data): page_resource = {getattr(self, resource)._create().__item_class__: resource for resource in self.json} - data_pages = [getattr(self, resource)._create().__item_class__ for resource in data] + data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES] for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)): yield page_resource[page_cls] From 82010e4ba28831c2c995353369736aa55ed702b5 Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Tue, 9 Jun 2020 14:43:43 -0400 Subject: [PATCH 105/494] removes extra spacing --- awx/ui_next/src/util/dates.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/awx/ui_next/src/util/dates.jsx b/awx/ui_next/src/util/dates.jsx index ba4df777bf..6b8eeea7e1 100644 --- a/awx/ui_next/src/util/dates.jsx +++ b/awx/ui_next/src/util/dates.jsx @@ -24,8 +24,8 @@ export function timeOfDay() { const second = prependZeros(date.getSeconds()); const time = hour > 12 - ? `${hour - 12}:${minute} :${second} PM` - : `${hour}:${minute}:${second}`; + ? `${hour - 12}:${minute}:${second} PM` + : `${hour}:${minute}:${second} AM`; return time; } From 65fc2db42ff57e717cc207512c3edb337ec37a10 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 30 Apr 2020 18:13:13 -0400 Subject: [PATCH 106/494] Recursively queue up related assets to be created and/or assigned --- awxkit/awxkit/api/pages/api.py | 124 ++++++++++++++++++-------------- awxkit/awxkit/api/pages/page.py | 3 + 2 files changed, 72 insertions(+), 55 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 522b749c82..60e6e959e4 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,5 +1,6 @@ import itertools import logging +import queue from awxkit.api.resources import resources import awxkit.exceptions as exc @@ -181,6 +182,7 @@ class ApiV2(base.Base): yield page_resource[page_cls] def _import_list(self, endpoint, assets): + log.debug("_import_list -- endpoint: %s, assets: %s", endpoint.endpoint, repr(assets)) post_fields = utils.get_post_fields(endpoint, self._cache) for asset in assets: post_data = {} @@ -214,67 +216,79 @@ class ApiV2(base.Base): self._cache.set_page(_page) - def _assign_roles(self, page, roles): - role_endpoint = page.json['related']['roles'] - for role in roles: - if 'content_object' not in role: - continue # admin role - obj_page = self._cache.get_by_natural_key(role['content_object']) - if obj_page is not None: - role_page = obj_page.get_object_role(role['name'], by_name=True) - try: - role_endpoint.post({'id': role_page['id']}) - except exc.NoContent: # desired exception on successful (dis)association - pass - else: - pass # admin role - - def _assign_related(self, _page, name, related_set): - endpoint = _page.related[name] - if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. 
survey_spec - endpoint.post(related_set) - return - - if 'natural_key' not in related_set[0]: # It is an attach set - # Try to impedance match - related = endpoint.get(all_pages=True) - existing = {rel['id'] for rel in related.results} - for item in related_set: - rel_page = self._cache.get_by_natural_key(item) - if rel_page is None: - continue # FIXME - if rel_page['id'] in existing: - continue - try: - post_data = {'id': rel_page['id']} - endpoint.post(post_data) - except exc.NoContent: # desired exception on successful (dis)association - pass - except exc.Common as e: - log.error("Object association failed: %s.", e) - log.debug("post_data: %r", post_data) - raise - else: # It is a create set - self._import_list(endpoint, related_set) - - # FIXME: deal with pruning existing relations that do not match the import set - - def _assign_related_assets(self, assets): - for asset in assets: - _page = self._cache.get_by_natural_key(asset['natural_key']) - if _page is None: - log.error("Related object with natural key not found: %r", asset['natural_key']) - continue + # Queue up everything related to be either created or assigned. for name, S in asset.get('related', {}).items(): if not S: continue if name == 'roles': - self._assign_roles(_page, S) + self._roles.put((_page, S)) else: - self._assign_related(_page, name, S) + self._related.put((_page, name, S)) + + + def _assign_roles(self): + while True: + try: + _page, roles = self._roles.get_nowait() + self._roles.task_done() + role_endpoint = _page.json['related']['roles'] + for role in roles: + if 'content_object' not in role: + continue # admin role + obj_page = self._cache.get_by_natural_key(role['content_object']) + if obj_page is not None: + role_page = obj_page.get_object_role(role['name'], by_name=True) + try: + role_endpoint.post({'id': role_page['id']}) + except exc.NoContent: # desired exception on successful (dis)association + pass + else: + pass # admin role + except queue.Empty: + break + + def _assign_related(self): + while True: + try: + _page, name, related_set = self._related.get_nowait() + self._related.task_done() + endpoint = _page.related[name] + if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. survey_spec + endpoint.post(related_set) + return + + if 'natural_key' not in related_set[0]: # It is an attach set + # Try to impedance match + related = endpoint.get(all_pages=True) + existing = {rel['id'] for rel in related.results} + for item in related_set: + rel_page = self._cache.get_by_natural_key(item) + if rel_page is None: + continue # FIXME + if rel_page['id'] in existing: + continue + try: + post_data = {'id': rel_page['id']} + endpoint.post(post_data) + log.error("endpoint: %s, id: %s", endpoint.endpoint, rel_page['id']) + except exc.NoContent: # desired exception on successful (dis)association + pass + except exc.Common as e: + log.error("Object association failed: %s.", e) + log.debug("post_data: %r", post_data) + raise + else: # It is a create set + self._cache.get_page(endpoint) + self._import_list(endpoint, related_set) + + # FIXME: deal with pruning existing relations that do not match the import set + except queue.Empty: + break def import_assets(self, data): self._cache = page.PageCache() + self._related = queue.Queue() + self._roles = queue.Queue() for resource in self._dependent_resources(data): endpoint = getattr(self, resource) @@ -283,8 +297,8 @@ class ApiV2(base.Base): self._import_list(endpoint, data.get(resource) or []) # FIXME: should we delete existing unpatched assets? 
- for assets in data.values(): - self._assign_related_assets(assets) + self._assign_related() + self._assign_roles() page.register_page(resources.v2, ApiV2) diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 814297a4eb..338f370a90 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -562,6 +562,7 @@ class PageCache(object): return self.options.setdefault(url, options) def set_page(self, page): + log.debug("set_page: %s", page.endpoint) self.pages_by_url[page.endpoint] = page if getattr(page, 'NATURAL_KEY', None): natural_key = page.get_natural_key(cache=self) @@ -588,9 +589,11 @@ class PageCache(object): log.warning("This endpoint is deprecated: %s", url) return self.pages_by_url.setdefault(url, None) + log.debug("get_page: %s", page.endpoint) return self.set_page(page) def get_by_natural_key(self, natural_key): endpoint = self.pages_by_natural_key.get(utils.freeze(natural_key)) + log.debug("get_by_natural_key: %s, endpoint: %s", repr(natural_key), endpoint) if endpoint: return self.get_page(endpoint) From 1f7c1af64ec2ff69abd317db7e984403247d2e1f Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 1 May 2020 11:44:03 -0400 Subject: [PATCH 107/494] Fix WFJT nodes endpoint to return node pages when posting not the node list page. --- awxkit/awxkit/api/pages/api.py | 1 - awxkit/awxkit/api/pages/page.py | 4 +++- awxkit/awxkit/api/pages/workflow_job_template_nodes.py | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 60e6e959e4..f18a5cce1c 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -225,7 +225,6 @@ class ApiV2(base.Base): else: self._related.put((_page, name, S)) - def _assign_roles(self): while True: try: diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 338f370a90..65a6290bb6 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -562,11 +562,13 @@ class PageCache(object): return self.options.setdefault(url, options) def set_page(self, page): - log.debug("set_page: %s", page.endpoint) + log.debug("set_page: %s %s", type(page), page.endpoint) self.pages_by_url[page.endpoint] = page if getattr(page, 'NATURAL_KEY', None): + log.debug("set_page has natural key fields.") natural_key = page.get_natural_key(cache=self) if natural_key is not None: + log.debug("set_page natural_key: %s", repr(natural_key)) self.pages_by_natural_key[utils.freeze(natural_key)] = page.endpoint if 'results' in page: for p in page.results: diff --git a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py index 1b61754928..5494d6063b 100644 --- a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py +++ b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py @@ -123,8 +123,8 @@ class WorkflowJobTemplateNode(HasCreate, base.Base): page.register_page([resources.workflow_job_template_node, - (resources.workflow_job_template_nodes, - 'post')], + (resources.workflow_job_template_nodes, 'post'), + (resources.workflow_job_template_workflow_nodes, 'post')], WorkflowJobTemplateNode) From aeeed4d3712bd6c6a00e7efe832b9490adbf202f Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 8 May 2020 13:17:19 -0400 Subject: [PATCH 108/494] Add more logging --- awxkit/awxkit/api/pages/api.py | 17 +++++++++++------ awxkit/awxkit/api/pages/page.py | 14 +++++--------- awxkit/awxkit/api/pages/roles.py | 7 +++++++ 
awxkit/awxkit/api/utils.py | 5 +++++ 4 files changed, 28 insertions(+), 15 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index f18a5cce1c..7615113f83 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -60,6 +60,7 @@ class ApiV2(base.Base): def _export(self, _page, post_fields): # Drop any (credential_type) assets that are being managed by the Tower instance. if _page.json.get('managed_by_tower'): + log.debug("%s is managed by Tower, skipping.", _page.endpoint) return None if post_fields is None: # Deprecated endpoint or insufficient permissions log.error("Object export failed: %s", _page.endpoint) @@ -78,14 +79,16 @@ class ApiV2(base.Base): rel_endpoint = self._cache.get_page(_page.related[key]) if rel_endpoint is None: # This foreign key is unreadable if post_fields[key].get('required'): - log.error("Foreign key export failed: %s", _page.related[key]) + log.error("Foreign key %r export failed for object %s.", key, _page.endpoint) return None - log.error("Foreign key export failed, setting to null: %s", _page.related[key]) + log.warning("Foreign key %r export failed for object %s, setting to null", key, _page.endpoint) continue - natural_key = rel_endpoint.get_natural_key(self._cache) - if natural_key is None: + rel_natural_key = rel_endpoint.get_natural_key(self._cache) + if rel_natural_key is None: + log.error("Unable to construct a natural key for foreign key %r of object %s.", + key, _page.endpoint) return None # This foreign key has unresolvable dependencies - fields[key] = natural_key + fields[key] = rel_natural_key related = {} for key, rel_endpoint in _page.related.items(): @@ -99,7 +102,8 @@ class ApiV2(base.Base): continue rel_post_fields = utils.get_post_fields(rel_endpoint, self._cache) - if rel_post_fields is None: # This is a read-only endpoint. + if rel_post_fields is None: + log.debug("%s is a read-only endpoint.", rel_endpoint) continue is_attach = 'id' in rel_post_fields # This is not a create-only endpoint. 
@@ -128,6 +132,7 @@ class ApiV2(base.Base): natural_key = _page.get_natural_key(self._cache) if natural_key is None: + log.error("Unable to construct a natural key for object %s.", _page.endpoint) return None fields['natural_key'] = natural_key diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py index 65a6290bb6..3ee1c38490 100644 --- a/awxkit/awxkit/api/pages/page.py +++ b/awxkit/awxkit/api/pages/page.py @@ -319,27 +319,23 @@ class Page(object): return page_cls(self.connection, endpoint=endpoint).get(**kw) def get_natural_key(self, cache=None): - warn = "This object does not have a natural key: %s" - if cache is None: cache = PageCache() if not getattr(self, 'NATURAL_KEY', None): - log.warning(warn, getattr(self, 'endpoint', '')) + log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', '')) return None natural_key = {} for key in self.NATURAL_KEY: if key in self.related: related_endpoint = cache.get_page(self.related[key]) - if related_endpoint is None: - return None - natural_key[key] = related_endpoint.get_natural_key(cache=cache) + if related_endpoint is not None: + natural_key[key] = related_endpoint.get_natural_key(cache=cache) + else: + natural_key[key] = None elif key in self: natural_key[key] = self[key] - if not natural_key: - log.warning(warn, getattr(self, 'endpoint', '')) - return None natural_key['type'] = self['type'] return natural_key diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py index 104af818b1..2b317dece1 100644 --- a/awxkit/awxkit/api/pages/roles.py +++ b/awxkit/awxkit/api/pages/roles.py @@ -1,9 +1,14 @@ +import logging + from awxkit.api.resources import resources from . import base from . import page +log = logging.getLogger(__name__) + + class Role(base.Base): NATURAL_KEY = ('name',) @@ -20,6 +25,8 @@ class Role(base.Base): if related_objs: related_endpoint = cache.get_page(related_objs[0]) if related_endpoint is None: + log.error("Unable to obtain content_object %s for role %s", + related_objs[0], self.endpoint) return None natural_key['content_object'] = related_endpoint.get_natural_key(cache=cache) diff --git a/awxkit/awxkit/api/utils.py b/awxkit/awxkit/api/utils.py index 355cdb0158..a3e6739b26 100644 --- a/awxkit/awxkit/api/utils.py +++ b/awxkit/awxkit/api/utils.py @@ -1,6 +1,9 @@ +import logging import re +log = logging.getLogger(__name__) + descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)') @@ -42,4 +45,6 @@ def get_post_fields(page, cache): if 'POST' in options_page.json['actions']: return options_page.json['actions']['POST'] else: + log.warning( + "Insufficient privileges on %s, inferring POST fields from description.", options_page.endpoint) return parse_description(options_page.json['description']) From 19b7c91486c6a401f0fcb82f1c755d94dbc72dca Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 28 May 2020 15:19:34 -0400 Subject: [PATCH 109/494] Return the changed status of imports --- awxkit/awxkit/api/pages/api.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 7615113f83..81aef61902 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -189,6 +189,9 @@ class ApiV2(base.Base): def _import_list(self, endpoint, assets): log.debug("_import_list -- endpoint: %s, assets: %s", endpoint.endpoint, repr(assets)) post_fields = utils.get_post_fields(endpoint, self._cache) + + changed = False + for asset in assets: post_data = {} 
for field, value in asset.items(): @@ -207,6 +210,7 @@ class ApiV2(base.Base): # We should only impose a default password if the resource doesn't exist. post_data.setdefault('password', 'abc123') _page = endpoint.post(post_data) + changed = True if asset['natural_key']['type'] == 'project': # When creating a project, we need to wait for its # first project update to finish so that associated @@ -214,6 +218,7 @@ class ApiV2(base.Base): _page.wait_until_completed() else: _page = _page.put(post_data) + changed = True except (exc.Common, AssertionError) as e: log.error("Object import failed: %s.", e) log.debug("post_data: %r", post_data) @@ -230,6 +235,8 @@ class ApiV2(base.Base): else: self._related.put((_page, name, S)) + return changed + def _assign_roles(self): while True: try: @@ -294,15 +301,20 @@ class ApiV2(base.Base): self._related = queue.Queue() self._roles = queue.Queue() + changed = False + for resource in self._dependent_resources(data): endpoint = getattr(self, resource) # Load up existing objects, so that we can try to update or link to them self._cache.get_page(endpoint) - self._import_list(endpoint, data.get(resource) or []) + imported = self._import_list(endpoint, data.get(resource) or []) + changed = changed or imported # FIXME: should we delete existing unpatched assets? self._assign_related() self._assign_roles() + return changed + page.register_page(resources.v2, ApiV2) From 18d09f892d6e014c4ddee47967519d5ac2aaecee Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Fri, 8 May 2020 12:25:14 -0700 Subject: [PATCH 110/494] disable reports option for foreman --- awx/plugins/inventory/foreman.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/awx/plugins/inventory/foreman.py b/awx/plugins/inventory/foreman.py index c3f97710d2..222084b5bd 100755 --- a/awx/plugins/inventory/foreman.py +++ b/awx/plugins/inventory/foreman.py @@ -176,8 +176,6 @@ class ForemanInventory(object): except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): self.want_facts = True - self.want_facts = self.want_facts and self.report_want_facts - try: self.want_hostcollections = config.getboolean('ansible', 'want_hostcollections') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): @@ -272,12 +270,11 @@ class ForemanInventory(object): return results def _use_inventory_report(self): - if not self.foreman_use_reports_api: - return False - status_url = "%s/api/v2/status" % self.foreman_url - result = self._get_json(status_url) - foreman_version = (LooseVersion(result.get('version')) >= LooseVersion('1.24.0')) - return foreman_version + # The options required to enable the reports feature have never been fully + # surfaced by awx. Disabling reports in the foreman script altogether. + # Given that this script is being deprecated in favor of the foreman _plugin_, + # reports should be enabled for the foreman plugin in future version of awx. 
+ return False def _fetch_params(self): options, params = ("no", "yes"), dict() From 1dd9772e4163c437b855a026676a52de31fbe7f9 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 4 Apr 2019 12:20:35 -0400 Subject: [PATCH 111/494] Allow use of fallback instance_ids --- .../management/commands/inventory_import.py | 22 +++++++++++++------ awx/settings/defaults.py | 2 +- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 9dad2c9a39..c5d1491365 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -271,7 +271,7 @@ class Command(BaseCommand): logging.DEBUG, 0])) logger.setLevel(log_levels.get(self.verbosity, 0)) - def _get_instance_id(self, from_dict, default=''): + def _get_instance_id(self, variables, default=''): ''' Retrieve the instance ID from the given dict of host variables. @@ -279,15 +279,23 @@ class Command(BaseCommand): the lookup will traverse into nested dicts, equivalent to: from_dict.get('foo', {}).get('bar', default) + + Multiple ID variables may be specified as 'foo.bar,foobar', so that + it will first try to find 'bar' inside of 'foo', and if unable, + will try to find 'foobar' as a fallback ''' instance_id = default if getattr(self, 'instance_id_var', None): - for key in self.instance_id_var.split('.'): - if not hasattr(from_dict, 'get'): - instance_id = default + for single_instance_id in self.instance_id_var.split(','): + from_dict = variables + for key in single_instance_id.split('.'): + if not hasattr(from_dict, 'get'): + instance_id = default + break + instance_id = from_dict.get(key, default) + from_dict = instance_id + if instance_id: break - instance_id = from_dict.get(key, default) - from_dict = instance_id return smart_text(instance_id) def _get_enabled(self, from_dict, default=None): @@ -422,7 +430,7 @@ class Command(BaseCommand): for mem_host in self.all_group.all_hosts.values(): instance_id = self._get_instance_id(mem_host.variables) if not instance_id: - logger.warning('Host "%s" has no "%s" variable', + logger.warning('Host "%s" has no "%s" variable(s)', mem_host.name, self.instance_id_var) continue mem_host.instance_id = instance_id diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 57d30bc23f..f35a8983bc 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -708,7 +708,7 @@ EC2_ENABLED_VAR = 'ec2_state' EC2_ENABLED_VALUE = 'running' # Inventory variable name containing unique instance ID. -EC2_INSTANCE_ID_VAR = 'ec2_id' +EC2_INSTANCE_ID_VAR = 'ec2_id,instance_id' # Filter for allowed group/host names when importing inventory from EC2. EC2_GROUP_FILTER = r'^.+$' From 6777b82d8e57f28019ce0a2db423664258e81604 Mon Sep 17 00:00:00 2001 From: Elijah DeLee Date: Wed, 10 Jun 2020 11:08:49 -0400 Subject: [PATCH 112/494] remove reference to memcached in k8s inv we don't have memcached container anymore --- installer/inventory | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/installer/inventory b/installer/inventory index 2908c2f7ef..0c31c78012 100644 --- a/installer/inventory +++ b/installer/inventory @@ -38,7 +38,7 @@ dockerhub_base=ansible # kubernetes_ingress_tls_secret=awx-cert # Kubernetes and Openshift Install Resource Requests -# These are the request and limit values for a pod's container for task/web/redis/memcached/management. +# These are the request and limit values for a pod's container for task/web/redis/management. 
# The total amount of requested resources for a pod is the sum of all # resources requested by all containers in the pod # A cpu_request of 1500 is 1.5 cores for the container to start out with. @@ -54,8 +54,6 @@ dockerhub_base=ansible # web_mem_limit=2 # redis_cpu_limit=1000 # redis_mem_limit=3 -# memcached_cpu_limit=1000 -# memcached_mem_limit=2 # management_cpu_limit=2000 # management_mem_limit=2 From a83a9c9723c44841bd133ffe66bef6ec00478ca6 Mon Sep 17 00:00:00 2001 From: Elijah DeLee Date: Wed, 10 Jun 2020 11:18:48 -0400 Subject: [PATCH 113/494] remove dep on python-memcached --- requirements/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 0d364a4922..9930111107 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -108,7 +108,7 @@ ruamel.yaml.clib==0.2.0 # via ruamel.yaml ruamel.yaml==0.16.10 # via openshift schedule==0.6.0 # via -r /awx_devel/requirements/requirements.in service-identity==18.1.0 # via twisted -six==1.14.0 # via ansible-runner, automat, cryptography, django-extensions, django-pglocks, google-auth, isodate, jaraco.collections, jaraco.logging, jaraco.text, jsonschema, kubernetes, openshift, pygerduty, pyopenssl, pyrad, pyrsistent, python-dateutil, python-memcached, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, twilio, txaio, websocket-client +six==1.14.0 # via ansible-runner, automat, cryptography, django-extensions, django-pglocks, google-auth, isodate, jaraco.collections, jaraco.logging, jaraco.text, jsonschema, kubernetes, openshift, pygerduty, pyopenssl, pyrad, pyrsistent, python-dateutil, slackclient, social-auth-app-django, social-auth-core, tacacs-plus, twilio, txaio, websocket-client slackclient==1.1.2 # via -r /awx_devel/requirements/requirements.in smmap==3.0.1 # via gitdb social-auth-app-django==3.1.0 # via -r /awx_devel/requirements/requirements.in From 9797c8e3c2f94831a24df44779fece232fb1937b Mon Sep 17 00:00:00 2001 From: nixocio Date: Wed, 10 Jun 2020 11:23:29 -0400 Subject: [PATCH 114/494] Add required variables to ClipboardCopyButton test Add required variables to `ClipboardCopyButton` test to remove warnings during test execution. 
--- .../ClipboardCopyButton/ClipboardCopyButton.test.jsx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/awx/ui_next/src/components/ClipboardCopyButton/ClipboardCopyButton.test.jsx b/awx/ui_next/src/components/ClipboardCopyButton/ClipboardCopyButton.test.jsx index 35836c0634..f777976135 100644 --- a/awx/ui_next/src/components/ClipboardCopyButton/ClipboardCopyButton.test.jsx +++ b/awx/ui_next/src/components/ClipboardCopyButton/ClipboardCopyButton.test.jsx @@ -12,6 +12,8 @@ describe('ClipboardCopyButton', () => { @@ -23,6 +25,8 @@ describe('ClipboardCopyButton', () => { @@ -40,6 +44,8 @@ describe('ClipboardCopyButton', () => { From e81ccf10df5096aca0554d1ae576eedac70e350d Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Wed, 10 Jun 2020 11:36:09 -0400 Subject: [PATCH 115/494] set proper permissions for the redis socket --- installer/roles/local_docker/templates/redis.conf.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer/roles/local_docker/templates/redis.conf.j2 b/installer/roles/local_docker/templates/redis.conf.j2 index daf69de7da..017bb067d5 100644 --- a/installer/roles/local_docker/templates/redis.conf.j2 +++ b/installer/roles/local_docker/templates/redis.conf.j2 @@ -1,4 +1,4 @@ unixsocket /var/run/redis/redis.sock -unixsocketperm 777 +unixsocketperm 660 port 0 bind 127.0.0.1 From 64ebfa75a8c48731401a67b4416c0549d83b41a4 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Wed, 10 Jun 2020 11:50:13 -0400 Subject: [PATCH 116/494] remove a highly verbose log lines --- awx/main/models/mixins.py | 1 - awx/main/tasks.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index e51807f47b..ce6d3717a7 100644 --- a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -566,7 +566,6 @@ class WebhookMixin(models.Model): def update_webhook_status(self, status): if not self.webhook_credential: - logger.debug("No credential configured to post back webhook status, skipping.") return status_api = self.extra_vars_dict.get('tower_webhook_status_api') diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 360076e7d8..ef2de1a500 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1013,8 +1013,6 @@ class BaseTask(object): 'resource_profiling_memory_poll_interval': mem_poll_interval, 'resource_profiling_pid_poll_interval': pid_poll_interval, 'resource_profiling_results_dir': results_dir}) - else: - logger.debug('Resource profiling not enabled for task') return resource_profiling_params From 423df6618d1f5484ea428dcd32371e67bb4c49d4 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Wed, 10 Jun 2020 13:35:41 -0400 Subject: [PATCH 117/494] Force containers in k8s to run under root group Normally containers belong to the 'root' group, but for some reason the downstream red hat scl redis image only belongs to the 'redis' group by default. This fixes that. 
--- installer/roles/kubernetes/templates/configmap.yml.j2 | 2 +- installer/roles/kubernetes/templates/deployment.yml.j2 | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/installer/roles/kubernetes/templates/configmap.yml.j2 b/installer/roles/kubernetes/templates/configmap.yml.j2 index c657fa9df7..b7553811c1 100644 --- a/installer/roles/kubernetes/templates/configmap.yml.j2 +++ b/installer/roles/kubernetes/templates/configmap.yml.j2 @@ -202,6 +202,6 @@ data: {{ kubernetes_deployment_name }}_redis_conf: | unixsocket /var/run/redis/redis.sock - unixsocketperm 777 + unixsocketperm 660 port 0 bind 127.0.0.1 diff --git a/installer/roles/kubernetes/templates/deployment.yml.j2 b/installer/roles/kubernetes/templates/deployment.yml.j2 index 22ce12153a..1f2d2c213d 100644 --- a/installer/roles/kubernetes/templates/deployment.yml.j2 +++ b/installer/roles/kubernetes/templates/deployment.yml.j2 @@ -40,6 +40,8 @@ spec: app: {{ kubernetes_deployment_name }} spec: serviceAccountName: awx + securityContext: + fsGroup: 0 terminationGracePeriodSeconds: 10 {% if custom_venvs is defined %} {% set trusted_hosts = "" %} From a33c3037650035072c71f6fe47b00335928464a4 Mon Sep 17 00:00:00 2001 From: Bill Nottingham Date: Wed, 10 Jun 2020 15:25:14 -0400 Subject: [PATCH 118/494] Remove active_counts_by_org I was trying to parse the difference between this and the (directly above) org_active_count from the comment, and then I grepped and realized this function appears unused. --- awx/main/managers.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/awx/main/managers.py b/awx/main/managers.py index f4b437d027..3f55f57954 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -44,20 +44,6 @@ class HostManager(models.Manager): inventory_sources__source='tower' ).filter(inventory__organization=org_id).values('name').distinct().count() - def active_counts_by_org(self): - """Return the counts of active, unique hosts for each organization. - Construction of query involves: - - remove any ordering specified in model's Meta - - Exclude hosts sourced from another Tower - - Consider only hosts where the canonical inventory is owned by each organization - - Restrict the query to only count distinct names - - Return the counts - """ - return self.order_by().exclude( - inventory_sources__source='tower' - ).values('inventory__organization').annotate( - inventory__organization__count=models.Count('name', distinct=True)) - def get_queryset(self): """When the parent instance of the host query set has a `kind=smart` and a `host_filter` set. Use the `host_filter` to generate the queryset for the hosts. From bf6e8f8e83d1caa43cf0aa1499d14b0f57cc4395 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 10 Jun 2020 15:27:15 -0400 Subject: [PATCH 119/494] Prioritize membership roles since certain role grants will not be accepted by the api unless the user or team is part of the correct organization. 
--- awxkit/awxkit/api/pages/api.py | 116 ++++++++++++++++----------------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 81aef61902..fe57fe33e8 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,6 +1,5 @@ import itertools import logging -import queue from awxkit.api.resources import resources import awxkit.exceptions as exc @@ -231,75 +230,75 @@ class ApiV2(base.Base): if not S: continue if name == 'roles': - self._roles.put((_page, S)) + self._roles.append((_page, S)) else: - self._related.put((_page, name, S)) + self._related.append((_page, name, S)) return changed + def _assign_role(self, endpoint, role): + if 'content_object' not in role: + return + obj_page = self._cache.get_by_natural_key(role['content_object']) + if obj_page is None: + return + role_page = obj_page.get_object_role(role['name'], by_name=True) + try: + endpoint.post({'id': role_page['id']}) + except exc.NoContent: # desired exception on successful (dis)association + pass + + def _assign_membership(self): + for _page, roles in self._roles: + role_endpoint = _page.json['related']['roles'] + for role in roles: + if role['name'] == 'Member': + self._assign_role(role_endpoint, role) + def _assign_roles(self): - while True: - try: - _page, roles = self._roles.get_nowait() - self._roles.task_done() - role_endpoint = _page.json['related']['roles'] - for role in roles: - if 'content_object' not in role: - continue # admin role - obj_page = self._cache.get_by_natural_key(role['content_object']) - if obj_page is not None: - role_page = obj_page.get_object_role(role['name'], by_name=True) - try: - role_endpoint.post({'id': role_page['id']}) - except exc.NoContent: # desired exception on successful (dis)association - pass - else: - pass # admin role - except queue.Empty: - break + for _page, roles in self._roles: + role_endpoint = _page.json['related']['roles'] + for role in roles: + if role['name'] != 'Member': + self._assign_role(role_endpoint, role) def _assign_related(self): - while True: - try: - _page, name, related_set = self._related.get_nowait() - self._related.task_done() - endpoint = _page.related[name] - if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. survey_spec - endpoint.post(related_set) - return + for _page, name, related_set in self._related: + endpoint = _page.related[name] + if isinstance(related_set, dict): # Relateds that are just json blobs, e.g. 
survey_spec + endpoint.post(related_set) + return - if 'natural_key' not in related_set[0]: # It is an attach set - # Try to impedance match - related = endpoint.get(all_pages=True) - existing = {rel['id'] for rel in related.results} - for item in related_set: - rel_page = self._cache.get_by_natural_key(item) - if rel_page is None: - continue # FIXME - if rel_page['id'] in existing: - continue - try: - post_data = {'id': rel_page['id']} - endpoint.post(post_data) - log.error("endpoint: %s, id: %s", endpoint.endpoint, rel_page['id']) - except exc.NoContent: # desired exception on successful (dis)association - pass - except exc.Common as e: - log.error("Object association failed: %s.", e) - log.debug("post_data: %r", post_data) - raise - else: # It is a create set - self._cache.get_page(endpoint) - self._import_list(endpoint, related_set) + if 'natural_key' not in related_set[0]: # It is an attach set + # Try to impedance match + related = endpoint.get(all_pages=True) + existing = {rel['id'] for rel in related.results} + for item in related_set: + rel_page = self._cache.get_by_natural_key(item) + if rel_page is None: + continue # FIXME + if rel_page['id'] in existing: + continue + try: + post_data = {'id': rel_page['id']} + endpoint.post(post_data) + log.error("endpoint: %s, id: %s", endpoint.endpoint, rel_page['id']) + except exc.NoContent: # desired exception on successful (dis)association + pass + except exc.Common as e: + log.error("Object association failed: %s.", e) + log.debug("post_data: %r", post_data) + raise + else: # It is a create set + self._cache.get_page(endpoint) + self._import_list(endpoint, related_set) - # FIXME: deal with pruning existing relations that do not match the import set - except queue.Empty: - break + # FIXME: deal with pruning existing relations that do not match the import set def import_assets(self, data): self._cache = page.PageCache() - self._related = queue.Queue() - self._roles = queue.Queue() + self._related = [] + self._roles = [] changed = False @@ -312,6 +311,7 @@ class ApiV2(base.Base): # FIXME: should we delete existing unpatched assets? self._assign_related() + self._assign_membership() self._assign_roles() return changed From 17eaeb28a8e9028e192e6d9eccbbb19f864ba8bf Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Wed, 10 Jun 2020 13:30:05 -0700 Subject: [PATCH 120/494] update VMWARE_INSTANCE_ID_VAR * Favor instanceUuid * .. but fall back to instanceuuid if necessary --- awx/settings/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index f35a8983bc..46b5e454c3 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -727,7 +727,7 @@ VMWARE_ENABLED_VAR = 'guest.gueststate' VMWARE_ENABLED_VALUE = 'running' # Inventory variable name containing the unique instance ID. -VMWARE_INSTANCE_ID_VAR = 'config.instanceuuid' +VMWARE_INSTANCE_ID_VAR = 'config.instanceUuid, config.instanceuuid' # Filter for allowed group and host names when importing inventory # from VMware. From 8eecfeaab43f11e7ec4fe624530eca0c60e8556c Mon Sep 17 00:00:00 2001 From: nixocio Date: Tue, 9 Jun 2020 16:49:57 -0400 Subject: [PATCH 121/494] Add stub files for Credential Types Add stub files for Credential Types. Routing system, and screens layout. 
closes: https://github.com/ansible/awx/issues/7301 --- .../screens/CredentialType/CredentialType.jsx | 25 ++++++++ .../CredentialTypeAdd/CredentialTypeAdd.jsx | 14 +++++ .../CredentialType/CredentialTypeAdd/index.js | 1 + .../CredentialTypeDetails.jsx | 12 ++++ .../CredentialTypeDetails/index.js | 1 + .../CredentialTypeEdit/CredentialTypeEdit.jsx | 12 ++++ .../CredentialTypeEdit/index.js | 1 + .../CredentialTypeList/CredentialTypeList.jsx | 14 +++++ .../CredentialTypeList/index.js | 1 + .../CredentialType/CredentialTypes.jsx | 60 +++++++++++++------ .../CredentialType/CredentialTypes.test.jsx | 8 +-- 11 files changed, 124 insertions(+), 25 deletions(-) create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialType.jsx create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/CredentialTypeAdd.jsx create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/index.js create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/index.js create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/index.js create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeList/index.js diff --git a/awx/ui_next/src/screens/CredentialType/CredentialType.jsx b/awx/ui_next/src/screens/CredentialType/CredentialType.jsx new file mode 100644 index 0000000000..1d5b2ecca3 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialType.jsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { Route, Switch, Redirect } from 'react-router-dom'; + +import CredentialTypeDetails from './CredentialTypeDetails'; +import CredentialTypeEdit from './CredentialTypeEdit'; + +function CredentialType() { + return ( + + + + + + + + + + ); +} + +export default CredentialType; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/CredentialTypeAdd.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/CredentialTypeAdd.jsx new file mode 100644 index 0000000000..4f3907e7bd --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/CredentialTypeAdd.jsx @@ -0,0 +1,14 @@ +import React from 'react'; +import { Card, PageSection } from '@patternfly/react-core'; + +function CredentialTypeAdd() { + return ( + + +
Credential Type Add
+
+
+ ); +} + +export default CredentialTypeAdd; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/index.js b/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/index.js new file mode 100644 index 0000000000..47ad6341d0 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeAdd/index.js @@ -0,0 +1 @@ +export { default } from './CredentialTypeAdd'; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx new file mode 100644 index 0000000000..58543324b7 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx @@ -0,0 +1,12 @@ +import React from 'react'; +import { Card, PageSection } from '@patternfly/react-core'; + +function CredentialTypeDetails() { + return ( + + Credential Type Details + + ); +} + +export default CredentialTypeDetails; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/index.js b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/index.js new file mode 100644 index 0000000000..d2f8e7c13d --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/index.js @@ -0,0 +1 @@ +export { default } from './CredentialTypeDetails'; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx new file mode 100644 index 0000000000..9ccbf329d0 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx @@ -0,0 +1,12 @@ +import React from 'react'; +import { Card, PageSection } from '@patternfly/react-core'; + +function CredentialTypeEdit() { + return ( + + Credential Type Edit + + ); +} + +export default CredentialTypeEdit; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/index.js b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/index.js new file mode 100644 index 0000000000..8a35d6b6fd --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/index.js @@ -0,0 +1 @@ +export { default } from './CredentialTypeEdit'; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx new file mode 100644 index 0000000000..af9c5c0f11 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx @@ -0,0 +1,14 @@ +import React from 'react'; +import { Card, PageSection } from '@patternfly/react-core'; + +function CredentialTypeList() { + return ( + + +
Credential Type List
+
+
+ ); +} + +export default CredentialTypeList; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeList/index.js b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/index.js new file mode 100644 index 0000000000..ed47b3615c --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/index.js @@ -0,0 +1 @@ +export { default } from './CredentialTypeList'; diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypes.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypes.jsx index c9bed43ad4..0f2c2efb0c 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialTypes.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypes.jsx @@ -1,26 +1,48 @@ -import React, { Component, Fragment } from 'react'; +import React, { useState, useCallback } from 'react'; import { withI18n } from '@lingui/react'; import { t } from '@lingui/macro'; -import { - PageSection, - PageSectionVariants, - Title, -} from '@patternfly/react-core'; +import { Route, Switch } from 'react-router-dom'; -class CredentialTypes extends Component { - render() { - const { i18n } = this.props; - const { light } = PageSectionVariants; +import CredentialTypeAdd from './CredentialTypeAdd'; +import CredentialTypeList from './CredentialTypeList'; +import CredentialType from './CredentialType'; +import Breadcrumbs from '../../components/Breadcrumbs'; - return ( - - - {i18n._(t`Credential Types`)} - - - - ); - } +function CredentialTypes({ i18n }) { + const [breadcrumbConfig, setBreadcrumbConfig] = useState({ + '/credential_types': i18n._(t`Credential Types`), + '/credential_types/add': i18n._(t`Create Credential Types`), + }); + + const buildBreadcrumbConfig = useCallback( + credentialTypes => { + if (!credentialTypes) { + return; + } + setBreadcrumbConfig({ + '/credential_types': i18n._(t`Credential Types`), + '/credential_types/add': i18n._(t`Create Credential Types`), + [`/credential_types/${credentialTypes.id}`]: `${credentialTypes.name}`, + }); + }, + [i18n] + ); + return ( + <> + + + + + + + + + + + + + + ); } export default withI18n()(CredentialTypes); diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypes.test.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypes.test.jsx index 24373b0a50..468a55ac7a 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialTypes.test.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypes.test.jsx @@ -4,15 +4,13 @@ import { mountWithContexts } from '../../../testUtils/enzymeHelpers'; import CredentialTypes from './CredentialTypes'; -describe('', () => { +describe('', () => { let pageWrapper; let pageSections; - let title; beforeEach(() => { pageWrapper = mountWithContexts(); pageSections = pageWrapper.find('PageSection'); - title = pageWrapper.find('Title'); }); afterEach(() => { @@ -21,9 +19,7 @@ describe('', () => { test('initially renders without crashing', () => { expect(pageWrapper.length).toBe(1); - expect(pageSections.length).toBe(2); - expect(title.length).toBe(1); - expect(title.props().size).toBe('2xl'); + expect(pageSections.length).toBe(1); expect(pageSections.first().props().variant).toBe('light'); }); }); From 85deb8711c70da7f34ea67330a542259287ad298 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Thu, 4 Jun 2020 18:32:14 -0400 Subject: [PATCH 122/494] Add queue / instance group registration to heartbeat for k8s installs There is some history here. 
https://github.com/ansible/awx/pull/7190 <- This PR was an attempt at fixing a bug notting ran into where some jobs on k8s installs would get stuck in Waiting forever. The PR mentioned above introduced a bug where there are no instance groups on a fresh k8s-based install. This is because this process currently happens in the launch scripts, before the database is up. With this patch, queue / instance group registration happens in the heartbeat, right after auto-registering the instance. --- .../management/commands/register_queue.py | 103 ++++++++++-------- awx/main/managers.py | 5 +- .../templates/launch_awx_task.sh.j2 | 4 +- 3 files changed, 63 insertions(+), 49 deletions(-) diff --git a/awx/main/management/commands/register_queue.py b/awx/main/management/commands/register_queue.py index 61761ec2aa..edd8068b89 100644 --- a/awx/main/management/commands/register_queue.py +++ b/awx/main/management/commands/register_queue.py @@ -16,31 +16,24 @@ class InstanceNotFound(Exception): super(InstanceNotFound, self).__init__(*args, **kwargs) -class Command(BaseCommand): +class RegisterQueue: + def __init__(self, queuename, controller, instance_percent, inst_min, hostname_list): + self.instance_not_found_err = None + self.queuename = queuename + self.controller = controller + self.instance_percent = instance_percent + self.instance_min = inst_min + self.hostname_list = hostname_list - def add_arguments(self, parser): - parser.add_argument('--queuename', dest='queuename', type=str, - help='Queue to create/update') - parser.add_argument('--hostnames', dest='hostnames', type=str, - help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)') - parser.add_argument('--controller', dest='controller', type=str, - default='', help='The controlling group (makes this an isolated group)') - parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0, - help='The percentage of active instances that will be assigned to this group'), - parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0, - help='The minimum number of instance that will be retained for this group from available instances') - - - def get_create_update_instance_group(self, queuename, instance_percent, instance_min): + def get_create_update_instance_group(self): created = False changed = False - - (ig, created) = InstanceGroup.objects.get_or_create(name=queuename) - if ig.policy_instance_percentage != instance_percent: - ig.policy_instance_percentage = instance_percent + (ig, created) = InstanceGroup.objects.get_or_create(name=self.queuename) + if ig.policy_instance_percentage != self.instance_percent: + ig.policy_instance_percentage = self.instance_percent changed = True - if ig.policy_instance_minimum != instance_min: - ig.policy_instance_minimum = instance_min + if ig.policy_instance_minimum != self.instance_min: + ig.policy_instance_minimum = self.instance_min changed = True if changed: @@ -48,12 +41,12 @@ class Command(BaseCommand): return (ig, created, changed) - def update_instance_group_controller(self, ig, controller): + def update_instance_group_controller(self, ig): changed = False control_ig = None - if controller: - control_ig = InstanceGroup.objects.filter(name=controller).first() + if self.controller: + control_ig = InstanceGroup.objects.filter(name=self.controller).first() if control_ig and ig.controller_id != control_ig.pk: ig.controller = control_ig @@ -62,10 +55,10 @@ class Command(BaseCommand): return (control_ig, changed) - def 
add_instances_to_group(self, ig, hostname_list): + def add_instances_to_group(self, ig): changed = False - instance_list_unique = set([x.strip() for x in hostname_list if x]) + instance_list_unique = set([x.strip() for x in self.hostname_list if x]) instances = [] for inst_name in instance_list_unique: instance = Instance.objects.filter(hostname=inst_name) @@ -86,43 +79,61 @@ class Command(BaseCommand): return (instances, changed) - def handle(self, **options): - instance_not_found_err = None - queuename = options.get('queuename') - if not queuename: - raise CommandError("Specify `--queuename` to use this command.") - ctrl = options.get('controller') - inst_per = options.get('instance_percent') - inst_min = options.get('instance_minimum') - hostname_list = [] - if options.get('hostnames'): - hostname_list = options.get('hostnames').split(",") - + def register(self): with advisory_lock('cluster_policy_lock'): with transaction.atomic(): changed2 = False changed3 = False - (ig, created, changed1) = self.get_create_update_instance_group(queuename, inst_per, inst_min) + (ig, created, changed1) = self.get_create_update_instance_group() if created: print("Creating instance group {}".format(ig.name)) elif not created: print("Instance Group already registered {}".format(ig.name)) - if ctrl: - (ig_ctrl, changed2) = self.update_instance_group_controller(ig, ctrl) + if self.controller: + (ig_ctrl, changed2) = self.update_instance_group_controller(ig) if changed2: - print("Set controller group {} on {}.".format(ctrl, queuename)) + print("Set controller group {} on {}.".format(self.controller, self.queuename)) try: - (instances, changed3) = self.add_instances_to_group(ig, hostname_list) + (instances, changed3) = self.add_instances_to_group(ig) for i in instances: print("Added instance {} to {}".format(i.hostname, ig.name)) except InstanceNotFound as e: - instance_not_found_err = e + self.instance_not_found_err = e if any([changed1, changed2, changed3]): print('(changed: True)') - if instance_not_found_err: - print(instance_not_found_err.message) + +class Command(BaseCommand): + + def add_arguments(self, parser): + parser.add_argument('--queuename', dest='queuename', type=str, + help='Queue to create/update') + parser.add_argument('--hostnames', dest='hostnames', type=str, + help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)') + parser.add_argument('--controller', dest='controller', type=str, + default='', help='The controlling group (makes this an isolated group)') + parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0, + help='The percentage of active instances that will be assigned to this group'), + parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0, + help='The minimum number of instance that will be retained for this group from available instances') + + + def handle(self, **options): + queuename = options.get('queuename') + if not queuename: + raise CommandError("Specify `--queuename` to use this command.") + ctrl = options.get('controller') + inst_per = options.get('instance_percent') + instance_min = options.get('instance_minimum') + hostname_list = [] + if options.get('hostnames'): + hostname_list = options.get('hostnames').split(",") + + rq = RegisterQueue(queuename, ctrl, inst_per, instance_min, hostname_list) + rq.register() + if rq.instance_not_found_err: + print(rq.instance_not_found_err.message) sys.exit(1) diff --git a/awx/main/managers.py b/awx/main/managers.py index 
2076e7f0b0..f4b437d027 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -149,8 +149,11 @@ class InstanceManager(models.Manager): def get_or_register(self): if settings.AWX_AUTO_DEPROVISION_INSTANCES: + from awx.main.management.commands.register_queue import RegisterQueue pod_ip = os.environ.get('MY_POD_IP') - return self.register(ip_address=pod_ip) + registered = self.register(ip_address=pod_ip) + RegisterQueue('tower', None, 100, 0, []).register() + return registered else: return (False, self.me()) diff --git a/installer/roles/image_build/templates/launch_awx_task.sh.j2 b/installer/roles/image_build/templates/launch_awx_task.sh.j2 index 2e74dab678..c038059b03 100755 --- a/installer/roles/image_build/templates/launch_awx_task.sh.j2 +++ b/installer/roles/image_build/templates/launch_awx_task.sh.j2 @@ -12,6 +12,8 @@ ANSIBLE_REMOTE_TEMP=/tmp ANSIBLE_LOCAL_TEMP=/tmp ansible -i "127.0.0.1," -c loca if [ -z "$AWX_SKIP_MIGRATIONS" ]; then awx-manage migrate --noinput + awx-manage provision_instance --hostname=$(hostname) + awx-manage register_queue --queuename=tower --instance_percent=100 fi if [ ! -z "$AWX_ADMIN_USER" ]&&[ ! -z "$AWX_ADMIN_PASSWORD" ]; then @@ -21,8 +23,6 @@ if [ ! -z "$AWX_ADMIN_USER" ]&&[ ! -z "$AWX_ADMIN_PASSWORD" ]; then {% endif %} fi echo 'from django.conf import settings; x = settings.AWX_TASK_ENV; x["HOME"] = "/var/lib/awx"; settings.AWX_TASK_ENV = x' | awx-manage shell -awx-manage provision_instance --hostname=$(hostname) -awx-manage register_queue --queuename=tower --instance_percent=100 unset $(cut -d = -f -1 /etc/tower/conf.d/environment.sh) From 9514adaf3ae3396104a54f78e6c5ec983a90f3c9 Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Wed, 10 Jun 2020 13:55:54 -0700 Subject: [PATCH 123/494] wrap --instance-id-var in quotes --- awx/main/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 5a287a1444..09014d9da6 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -2554,7 +2554,7 @@ class RunInventoryUpdate(BaseTask): args.append('--exclude-empty-groups') if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False): args.extend(['--instance-id-var', - getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()),]) + "'{}'".format(getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())),]) # Add arguments for the source inventory script args.append('--source') args.append(self.pseudo_build_inventory(inventory_update, private_data_dir)) From 1970fac98978a883f082f8134b9415282d3bfa11 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 4 Apr 2019 12:20:35 -0400 Subject: [PATCH 124/494] Allow use of fallback instance_ids --- .../management/commands/inventory_import.py | 22 +++++++++++++------ awx/settings/defaults.py | 2 +- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 14ce8d389b..2e51283445 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -271,7 +271,7 @@ class Command(BaseCommand): logging.DEBUG, 0])) logger.setLevel(log_levels.get(self.verbosity, 0)) - def _get_instance_id(self, from_dict, default=''): + def _get_instance_id(self, variables, default=''): ''' Retrieve the instance ID from the given dict of host variables. 
@@ -279,15 +279,23 @@ class Command(BaseCommand): the lookup will traverse into nested dicts, equivalent to: from_dict.get('foo', {}).get('bar', default) + + Multiple ID variables may be specified as 'foo.bar,foobar', so that + it will first try to find 'bar' inside of 'foo', and if unable, + will try to find 'foobar' as a fallback ''' instance_id = default if getattr(self, 'instance_id_var', None): - for key in self.instance_id_var.split('.'): - if not hasattr(from_dict, 'get'): - instance_id = default + for single_instance_id in self.instance_id_var.split(','): + from_dict = variables + for key in single_instance_id.split('.'): + if not hasattr(from_dict, 'get'): + instance_id = default + break + instance_id = from_dict.get(key, default) + from_dict = instance_id + if instance_id: break - instance_id = from_dict.get(key, default) - from_dict = instance_id return smart_text(instance_id) def _get_enabled(self, from_dict, default=None): @@ -422,7 +430,7 @@ class Command(BaseCommand): for mem_host in self.all_group.all_hosts.values(): instance_id = self._get_instance_id(mem_host.variables) if not instance_id: - logger.warning('Host "%s" has no "%s" variable', + logger.warning('Host "%s" has no "%s" variable(s)', mem_host.name, self.instance_id_var) continue mem_host.instance_id = instance_id diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 8df6d4f440..606ee60072 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -702,7 +702,7 @@ EC2_ENABLED_VAR = 'ec2_state' EC2_ENABLED_VALUE = 'running' # Inventory variable name containing unique instance ID. -EC2_INSTANCE_ID_VAR = 'ec2_id' +EC2_INSTANCE_ID_VAR = 'ec2_id,instance_id' # Filter for allowed group/host names when importing inventory from EC2. EC2_GROUP_FILTER = r'^.+$' From 9eaee801a742751632b8490c740a9565178a28b5 Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Wed, 10 Jun 2020 13:30:05 -0700 Subject: [PATCH 125/494] update VMWARE_INSTANCE_ID_VAR * Favor instanceUuid * .. but fall back to instanceuuid if necessary --- awx/settings/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 606ee60072..60a11daa7a 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -721,7 +721,7 @@ VMWARE_ENABLED_VAR = 'guest.gueststate' VMWARE_ENABLED_VALUE = 'running' # Inventory variable name containing the unique instance ID. -VMWARE_INSTANCE_ID_VAR = 'config.instanceuuid' +VMWARE_INSTANCE_ID_VAR = 'config.instanceUuid, config.instanceuuid' # Filter for allowed group and host names when importing inventory # from VMware. 
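A minimal standalone sketch (not part of any patch above) of how the comma-separated
fallback lookup added to _get_instance_id is expected to resolve the new
VMWARE_INSTANCE_ID_VAR value. The helper name and the sample host variables are
illustrative only, and the sketch strips whitespace around each candidate for
readability, which the diff above does not appear to do:

    def get_instance_id(variables, instance_id_var, default=''):
        # Try each comma-separated candidate in order; each candidate is a
        # dotted path traversed through nested dicts of host variables.
        for candidate in instance_id_var.split(','):
            value = variables
            for key in candidate.strip().split('.'):
                if not hasattr(value, 'get'):
                    value = default
                    break
                value = value.get(key, default)
            if value:
                return str(value)
        return str(default)

    host_vars = {'config': {'instanceuuid': '4207aaaa-example'}}
    # 'config.instanceUuid' misses, so the lookup falls back to 'config.instanceuuid'.
    print(get_instance_id(host_vars, 'config.instanceUuid, config.instanceuuid'))

This mirrors the commit message: favor config.instanceUuid, but fall back to
config.instanceuuid if necessary.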
From 3b61b8261637d4fa20445cbb9d6f4841e0996d8c Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Wed, 10 Jun 2020 13:55:54 -0700 Subject: [PATCH 126/494] wrap --instance-id-var in quotes --- awx/main/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index ef2de1a500..8fc3de1b6b 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -2552,7 +2552,7 @@ class RunInventoryUpdate(BaseTask): args.append('--exclude-empty-groups') if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False): args.extend(['--instance-id-var', - getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()),]) + "'{}'".format(getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())),]) # Add arguments for the source inventory script args.append('--source') args.append(self.pseudo_build_inventory(inventory_update, private_data_dir)) From 8eee0d40dd0d673ab99f7839cd5b7f0baa1a331f Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Thu, 11 Jun 2020 09:52:54 -0700 Subject: [PATCH 127/494] revert EC2_INSTANCE_ID_VAR --- awx/settings/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 46b5e454c3..ba7c481df0 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -708,7 +708,7 @@ EC2_ENABLED_VAR = 'ec2_state' EC2_ENABLED_VALUE = 'running' # Inventory variable name containing unique instance ID. -EC2_INSTANCE_ID_VAR = 'ec2_id,instance_id' +EC2_INSTANCE_ID_VAR = 'ec2_id' # Filter for allowed group/host names when importing inventory from EC2. EC2_GROUP_FILTER = r'^.+$' From 43ab6acb84e6573033972b4cafaf8420b17de5aa Mon Sep 17 00:00:00 2001 From: Jim Ladd Date: Thu, 11 Jun 2020 09:53:50 -0700 Subject: [PATCH 128/494] revert EC2_INSTANCE_ID_VAR --- awx/settings/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 60a11daa7a..f5507262bf 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -702,7 +702,7 @@ EC2_ENABLED_VAR = 'ec2_state' EC2_ENABLED_VALUE = 'running' # Inventory variable name containing unique instance ID. -EC2_INSTANCE_ID_VAR = 'ec2_id,instance_id' +EC2_INSTANCE_ID_VAR = 'ec2_id' # Filter for allowed group/host names when importing inventory from EC2. 
EC2_GROUP_FILTER = r'^.+$' From 0fda9d2c56ab4d5c328676ff70744f963b6de3b8 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 11 Jun 2020 11:17:52 -0400 Subject: [PATCH 129/494] Continue after failures to grant roles --- awxkit/awxkit/api/pages/api.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index fe57fe33e8..066153bfd3 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -247,6 +247,9 @@ class ApiV2(base.Base): endpoint.post({'id': role_page['id']}) except exc.NoContent: # desired exception on successful (dis)association pass + except exc.Common as e: + log.error("Role assignment failed: %s.", e) + log.debug("post_data: %r", {'id': role_page['id']}) def _assign_membership(self): for _page, roles in self._roles: @@ -288,7 +291,6 @@ class ApiV2(base.Base): except exc.Common as e: log.error("Object association failed: %s.", e) log.debug("post_data: %r", post_data) - raise else: # It is a create set self._cache.get_page(endpoint) self._import_list(endpoint, related_set) From 08d934170464100cd08215018ce40509c68cc58a Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Mon, 8 Jun 2020 16:29:06 -0400 Subject: [PATCH 130/494] Adds lists and list items and delete functionality --- awx/ui_next/src/api/index.js | 3 + awx/ui_next/src/api/models/Applications.js | 10 + .../ApplicationsList/ApplicationList.test.jsx | 189 ++++++++++++++++++ .../ApplicationsList/ApplicationListItem.jsx | 102 ++++++++++ .../ApplicationListItem.test.jsx | 68 +++++++ .../ApplicationsList/ApplicationsList.jsx | 175 +++++++++++++++- awx/ui_next/src/types.js | 12 ++ 7 files changed, 554 insertions(+), 5 deletions(-) create mode 100644 awx/ui_next/src/api/models/Applications.js create mode 100644 awx/ui_next/src/screens/Application/ApplicationsList/ApplicationList.test.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.test.jsx diff --git a/awx/ui_next/src/api/index.js b/awx/ui_next/src/api/index.js index 36330716fd..f7567750d6 100644 --- a/awx/ui_next/src/api/index.js +++ b/awx/ui_next/src/api/index.js @@ -1,4 +1,5 @@ import AdHocCommands from './models/AdHocCommands'; +import Applications from './models/Applications'; import Config from './models/Config'; import CredentialInputSources from './models/CredentialInputSources'; import CredentialTypes from './models/CredentialTypes'; @@ -31,6 +32,7 @@ import WorkflowJobTemplates from './models/WorkflowJobTemplates'; import WorkflowJobs from './models/WorkflowJobs'; const AdHocCommandsAPI = new AdHocCommands(); +const ApplicationsAPI = new Applications(); const ConfigAPI = new Config(); const CredentialInputSourcesAPI = new CredentialInputSources(); const CredentialTypesAPI = new CredentialTypes(); @@ -64,6 +66,7 @@ const WorkflowJobsAPI = new WorkflowJobs(); export { AdHocCommandsAPI, + ApplicationsAPI, ConfigAPI, CredentialInputSourcesAPI, CredentialTypesAPI, diff --git a/awx/ui_next/src/api/models/Applications.js b/awx/ui_next/src/api/models/Applications.js new file mode 100644 index 0000000000..50b709bdca --- /dev/null +++ b/awx/ui_next/src/api/models/Applications.js @@ -0,0 +1,10 @@ +import Base from '../Base'; + +class Applications extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/applications/'; + } +} + +export default Applications; diff --git 
a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationList.test.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationList.test.jsx new file mode 100644 index 0000000000..367886f2a9 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationList.test.jsx @@ -0,0 +1,189 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import { ApplicationsAPI } from '../../../api'; +import ApplicationsList from './ApplicationsList'; + +jest.mock('../../../api/models/Applications'); + +const applications = { + data: { + results: [ + { + id: 1, + name: 'Foo', + summary_fields: { + organization: { name: 'Org 1', id: 10 }, + user_capabilities: { edit: true, delete: true }, + }, + url: '', + organiation: 10, + }, + { + id: 2, + name: 'Bar', + summary_fields: { + organization: { name: 'Org 2', id: 20 }, + user_capabilities: { edit: true, delete: true }, + }, + url: '', + organization: 20, + }, + ], + count: 2, + }, +}; +const options = { data: { actions: { POST: true } } }; +describe('', () => { + let wrapper; + test('should mount properly', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue(options); + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + }); + test('should have data fetched and render 2 rows', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue(options); + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + expect(wrapper.find('ApplicationListItem').length).toBe(2); + expect(ApplicationsAPI.read).toBeCalled(); + expect(ApplicationsAPI.readOptions).toBeCalled(); + }); + + test('should delete item successfully', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue(options); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + + wrapper + .find('input#select-application-1') + .simulate('change', applications.data.results[0]); + + wrapper.update(); + + expect(wrapper.find('input#select-application-1').prop('checked')).toBe( + true + ); + await act(async () => + wrapper.find('Button[aria-label="Delete"]').prop('onClick')() + ); + + wrapper.update(); + + await act(async () => + wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')() + ); + expect(ApplicationsAPI.destroy).toBeCalledWith( + applications.data.results[0].id + ); + }); + + test('should throw content error', async () => { + ApplicationsAPI.read.mockRejectedValue( + new Error({ + response: { + config: { + method: 'get', + url: '/api/v2/applications/', + }, + data: 'An error occurred', + }, + }) + ); + ApplicationsAPI.readOptions.mockResolvedValue(options); + await act(async () => { + wrapper = mountWithContexts(); + }); + + await waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + expect(wrapper.find('ContentError').length).toBe(1); + }); + + test('should render deletion error modal', async () => { + ApplicationsAPI.destroy.mockRejectedValue( + new Error({ + response: { + config: { + method: 'delete', + url: '/api/v2/applications/', + }, + data: 'An error occurred', + }, + }) + ); + 
ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue(options); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + + wrapper.find('input#select-application-1').simulate('change', 'a'); + + wrapper.update(); + + expect(wrapper.find('input#select-application-1').prop('checked')).toBe( + true + ); + await act(async () => + wrapper.find('Button[aria-label="Delete"]').prop('onClick')() + ); + + wrapper.update(); + + await act(async () => + wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')() + ); + wrapper.update(); + + expect(wrapper.find('ErrorDetail').length).toBe(1); + }); + + test('should not render add button', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue({ + data: { actions: { POST: false } }, + }); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + expect(wrapper.find('ToolbarAddButton').length).toBe(0); + }); + test('should not render edit button for first list item', async () => { + applications.data.results[0].summary_fields.user_capabilities.edit = false; + ApplicationsAPI.read.mockResolvedValue(applications); + ApplicationsAPI.readOptions.mockResolvedValue({ + data: { actions: { POST: false } }, + }); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationsList', el => el.length > 0); + expect( + wrapper + .find('ApplicationListItem') + .at(0) + .find('PencilAltIcon').length + ).toBe(0); + expect( + wrapper + .find('ApplicationListItem') + .at(1) + .find('PencilAltIcon').length + ).toBe(1); + }); +}); diff --git a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx new file mode 100644 index 0000000000..ddd0df7bc0 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx @@ -0,0 +1,102 @@ +import React from 'react'; +import { string, bool, func } from 'prop-types'; +import { withI18n } from '@lingui/react'; +import { + Button, + DataListAction as _DataListAction, + DataListCheck, + DataListItem, + DataListItemCells, + DataListItemRow, + Tooltip, +} from '@patternfly/react-core'; + +import { t } from '@lingui/macro'; +import { Link } from 'react-router-dom'; +import styled from 'styled-components'; +import { PencilAltIcon } from '@patternfly/react-icons'; +import { Application } from '../../../types'; +import DataListCell from '../../../components/DataListCell'; + +const DataListAction = styled(_DataListAction)` + align-items: center; + display: grid; + grid-gap: 16px; + grid-template-columns: 40px; +`; + +function ApplicationListItem({ + application, + isSelected, + onSelect, + detailUrl, + i18n, +}) { + ApplicationListItem.propTypes = { + application: Application.isRequired, + detailUrl: string.isRequired, + isSelected: bool.isRequired, + onSelect: func.isRequired, + }; + + const labelId = `check-action-${application.id}`; + return ( + + + + + + {application.name} + + , + + + {application.summary_fields.organization.name} + + , + ]} + /> + + {application.summary_fields.user_capabilities.edit ? 
( + + + + ) : ( + '' + )} + + + + ); +} +export default withI18n()(ApplicationListItem); diff --git a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.test.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.test.jsx new file mode 100644 index 0000000000..0a53dd4cd8 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.test.jsx @@ -0,0 +1,68 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; + +import ApplicationListItem from './ApplicationListItem'; + +describe('', () => { + let wrapper; + const application = { + id: 1, + name: 'Foo', + summary_fields: { + organization: { id: 2, name: 'Organization' }, + user_capabilities: { edit: true }, + }, + }; + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('ApplicationListItem').length).toBe(1); + }); + test('should render the proper data', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect( + wrapper.find('DataListCell[aria-label="application name"]').text() + ).toBe('Foo'); + expect( + wrapper.find('DataListCell[aria-label="organization name"]').text() + ).toBe('Organization'); + expect(wrapper.find('input#select-application-1').prop('checked')).toBe( + false + ); + expect(wrapper.find('PencilAltIcon').length).toBe(1); + }); + test('should be checked', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('input#select-application-1').prop('checked')).toBe( + true + ); + }); +}); diff --git a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx index 6fcf16bb73..f2869c5d2b 100644 --- a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx @@ -1,15 +1,180 @@ -import React from 'react'; -import { Card, PageSection } from '@patternfly/react-core'; +import React, { useCallback, useEffect } from 'react'; +import { t } from '@lingui/macro'; +import { withI18n } from '@lingui/react'; +import { useLocation, useRouteMatch } from 'react-router-dom'; + +import { Card, PageSection } from '@patternfly/react-core'; +import { getQSConfig, parseQueryString } from '../../../util/qs'; +import useRequest, { useDeleteItems } from '../../../util/useRequest'; +import ErrorDetail from '../../../components/ErrorDetail'; +import AlertModal from '../../../components/AlertModal'; + +import DatalistToolbar from '../../../components/DataListToolbar'; +import { ApplicationsAPI } from '../../../api'; +import PaginatedDataList, { + ToolbarDeleteButton, + ToolbarAddButton, +} from '../../../components/PaginatedDataList'; +import useSelected from '../../../util/useSelected'; + +import ApplicationListItem from './ApplicationListItem'; + +const QS_CONFIG = getQSConfig('inventory', { + page: 1, + page_size: 20, + order_by: 'name', +}); +function ApplicationsList({ i18n }) { + const location = useLocation(); + const match = useRouteMatch(); + + const { + isLoading, + error, + request: fetchApplications, + result: { applications, itemCount, actions }, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + + const [response, 
actionsResponse] = await Promise.all([ + ApplicationsAPI.read(params), + ApplicationsAPI.readOptions(), + ]); + + return { + applications: response.data.results, + itemCount: response.data.count, + actions: actionsResponse.data.actions, + }; + }, [location]), + { + applications: [], + itemCount: 0, + actions: {}, + } + ); + + useEffect(() => { + fetchApplications(); + }, [fetchApplications]); + + const { selected, isAllSelected, handleSelect, setSelected } = useSelected( + applications + ); + + const { + isLoading: deleteLoading, + deletionError, + deleteItems: handleDeleteApplications, + clearDeletionError, + } = useDeleteItems( + useCallback(async () => { + await Promise.all(selected.map(({ id }) => ApplicationsAPI.destroy(id))); + }, [selected]), + { + qsConfig: QS_CONFIG, + allItemsSelected: isAllSelected, + fetchItems: fetchApplications, + } + ); + + const handleDelete = async () => { + await handleDeleteApplications(); + setSelected([]); + }; + + const canAdd = actions && actions.POST; -function ApplicationsList() { return ( <> -
Applications List
+ ( + + setSelected(isSelected ? [...applications] : []) + } + qsConfig={QS_CONFIG} + additionalControls={[ + ...(canAdd + ? [ + , + ] + : []), + , + ]} + /> + )} + renderItem={application => ( + handleSelect(application)} + isSelected={selected.some(row => row.id === application.id)} + /> + )} + emptyStateControls={ + canAdd && ( + + ) + } + />
+ + {i18n._(t`Failed to delete one or more applications.`)} + + ); } -export default ApplicationsList; +export default withI18n()(ApplicationsList); diff --git a/awx/ui_next/src/types.js b/awx/ui_next/src/types.js index 52e07d53cb..5e60df659a 100644 --- a/awx/ui_next/src/types.js +++ b/awx/ui_next/src/types.js @@ -42,6 +42,18 @@ export const AccessRecord = shape({ type: string, }); +export const Application = shape({ + id: number.isRequired, + name: string.isRequired, + organization: number, + summary_fields: shape({ + organization: shape({ + id: number.isRequired, + name: string.isRequired, + }), + }), +}); + export const Organization = shape({ id: number.isRequired, name: string.isRequired, From f211c70e69906c57570088b31dc32d892804bbba Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Tue, 9 Jun 2020 17:41:38 -0400 Subject: [PATCH 131/494] fixes qs namespace, and location of proptypes --- .../ApplicationsList/ApplicationListItem.jsx | 24 +++++++++++++------ .../ApplicationsList/ApplicationsList.jsx | 22 +++++++++++------ 2 files changed, 32 insertions(+), 14 deletions(-) diff --git a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx index ddd0df7bc0..470e4dc255 100644 --- a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationListItem.jsx @@ -15,6 +15,7 @@ import { t } from '@lingui/macro'; import { Link } from 'react-router-dom'; import styled from 'styled-components'; import { PencilAltIcon } from '@patternfly/react-icons'; +import { formatDateString } from '../../../util/dates'; import { Application } from '../../../types'; import DataListCell from '../../../components/DataListCell'; @@ -25,6 +26,10 @@ const DataListAction = styled(_DataListAction)` grid-template-columns: 40px; `; +const Label = styled.b` + margin-right: 20px; +`; + function ApplicationListItem({ application, isSelected, @@ -32,13 +37,6 @@ function ApplicationListItem({ detailUrl, i18n, }) { - ApplicationListItem.propTypes = { - application: Application.isRequired, - detailUrl: string.isRequired, - isSelected: bool.isRequired, - onSelect: func.isRequired, - }; - const labelId = `check-action-${application.id}`; return ( {application.summary_fields.organization.name} , + + + {formatDateString(application.modified)} + , ]} /> ); } + +ApplicationListItem.propTypes = { + application: Application.isRequired, + detailUrl: string.isRequired, + isSelected: bool.isRequired, + onSelect: func.isRequired, +}; + export default withI18n()(ApplicationListItem); diff --git a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx index f2869c5d2b..5870c341a3 100644 --- a/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx +++ b/awx/ui_next/src/screens/Application/ApplicationsList/ApplicationsList.jsx @@ -19,7 +19,7 @@ import useSelected from '../../../util/useSelected'; import ApplicationListItem from './ApplicationListItem'; -const QS_CONFIG = getQSConfig('inventory', { +const QS_CONFIG = getQSConfig('applications', { page: 1, page_size: 20, order_by: 'name', @@ -105,12 +105,8 @@ function ApplicationsList({ i18n }) { isDefault: true, }, { - name: i18n._(t`Created by (Username)`), - key: 'created_by__username', - }, - { - name: i18n._(t`Modified by (Username)`), - key: 'modified_by__username', + name: i18n._(t`Description`), 
+ key: 'description', }, ]} toolbarSortColumns={[ @@ -118,6 +114,18 @@ function ApplicationsList({ i18n }) { name: i18n._(t`Name`), key: 'name', }, + { + name: i18n._(t`Created`), + key: 'created', + }, + { + name: i18n._(t`Organization`), + key: 'organization', + }, + { + name: i18n._(t`Description`), + key: 'description', + }, ]} renderToolbar={props => ( Date: Fri, 12 Jun 2020 17:03:37 +0000 Subject: [PATCH 132/494] UI translation strings for release_3.7.1 branch --- awx/locale/fr/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/locale/ja/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/locale/zh/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/ui/po/fr.po | 25 +- awx/ui/po/ja.po | 24 +- awx/ui/po/zh.po | 27 +-- 6 files changed, 582 insertions(+), 586 deletions(-) diff --git a/awx/locale/fr/LC_MESSAGES/django.po b/awx/locale/fr/LC_MESSAGES/django.po index 2e07a6232a..62c2ba7292 100644 --- a/awx/locale/fr/LC_MESSAGES/django.po +++ b/awx/locale/fr/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -528,7 +528,7 @@ msgstr "Le projet de modèle d'inventaire est manquant ou non défini." msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "Inconnu, il se peut que le job ait été exécuté avant que les configurations de lancement ne soient sauvegardées." -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." msgstr "{} ne sont pas autorisés à utiliser les commandes ad hoc." @@ -547,324 +547,324 @@ msgstr "La variable fournie {} n'a pas de valeur de base de données de remplaç msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ est un mot clé réservé et ne peut pas être utilisé comme {}.\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "Un projet est nécessaire pour exécuter une tâche." -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." msgstr "Une révision n'a pas été exécutée en raison de l'échec de la mise à jour du projet." -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "L'inventaire associé à ce modèle de tâche est en cours de suppression." -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "L'inventaire fourni est en cours de suppression." -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "Ne peut pas attribuer plusieurs identifiants {}." -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "Ne peut pas attribuer d'information d'identification de type `{}`" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." 
msgstr "Le retrait des identifiants {} au moment du lancement sans procurer de valeurs de remplacement n'est pas pris en charge. La liste fournie manquait d'identifiant(s): {}." -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "L'inventaire associé à ce flux de travail est en cours de suppression." -#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "Type de message '{}' invalide, doit être soit 'message' soit 'body'" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "Chaîne attendue pour '{}', trouvé {}, " -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "Les messages ne peuvent pas contenir de nouvelles lignes (trouvé nouvelle ligne dans l'événement {})" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "Dict attendu pour le champ 'messages', trouvé {}" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "L'événement '{}' est invalide, il doit être de type 'started', 'success', 'error' ou 'workflow_approval'" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "Dict attendu pour l'événement '{}', trouvé {}" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "L'événement d'approbation de workflow '{}' n'est pas valide, il doit être 'running', 'approved', 'timed_out' ou 'denied'" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "Dict attendu pour l'événement d'approbation du workflow '{}', trouvé {}" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "Impossible de rendre le message '{}' : {}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "Champ '{}' non disponible" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 msgid "Security error due to field '{}'" msgstr "Erreur de sécurité due au champ '{}'" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "Le corps du webhook pour '{}' doit être un dictionnaire json. Trouvé le type '{}'." -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." msgstr "Le corps du webhook pour '{}' n'est pas un dictionnaire json valide ({})." -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "Champs obligatoires manquants pour la configuration des notifications : notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "Aucune valeur spécifiée pour le champ '{}'" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." 
msgstr "La méthode HTTP doit être soit 'POST' soit 'PUT'." -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "Champs obligatoires manquants pour la configuration des notifications : {}." -#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "Type de champ de configuration '{}' incorrect, {} attendu." -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "Corps de notification" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DTSTART:YYYYMMDDTHHMMSSZ" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ." -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "Une seule valeur DTSTART est prise en charge." -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "RRULE obligatoire dans rrule." -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "Une seule valeur RRULE est prise en charge." -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "INTERVAL obligatoire dans rrule." -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "SECONDLY n'est pas pris en charge." -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "Une seule valeur BYMONTHDAY est prise en charge." -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "Une seule valeur BYMONTH est prise en charge." -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "BYDAY avec un préfixe numérique non pris en charge." -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "BYYEARDAY non pris en charge." -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "BYWEEKNO non pris en charge." -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE peut contenir à la fois COUNT et UNTIL" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." msgstr "COUNT > 999 non pris en charge." -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "L'analyse rrule n'a pas pu être validée : {}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "La source d'inventaire doit être une ressource cloud." -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "Le projet manuel ne peut pas avoir de calendrier défini." 
-#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." msgstr "Impossible de planifier les sources d'inventaire avec `update_on_project_update`. Planifiez plutôt son projet source`{}`." -#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "Le nombre de jobs en cours d'exécution ou en attente qui sont ciblés pour cette instance." -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "Le nombre de jobs qui ciblent cette instance." -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "Le nombre de jobs en cours d'exécution ou en attente qui sont ciblés pour ce groupe d'instances." -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "Le nombre de jobs qui ciblent ce groupe d'instances" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "Indique si le groupe d'instances contrôle un autre groupe" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "Indique si les instances de ce groupe sont isolées. Les groupes isolés ont un groupe de contrôleurs désigné." -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "Indique si les instances de ce groupe sont conteneurisées. Les groupes conteneurisés ont un groupe Openshift ou Kubernetes désigné." -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "Pourcentage d'instances de stratégie" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "Le pourcentage minimum de toutes les instances qui seront automatiquement assignées à ce groupe lorsque de nouvelles instances seront mises en ligne." -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "Instances de stratégies minimum" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "Nombre minimum statique d'instances qui seront automatiquement assignées à ce groupe lors de la mise en ligne de nouvelles instances." -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "Listes d'instances de stratégie" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "Liste des cas de concordance exacte qui seront assignés à ce groupe." -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "Entrée dupliquée {}." 
-#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." msgstr "{} n'est pas un nom d'hôte valide d'instance existante." -#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "Des instances isolées ne peuvent pas être ajoutées ou supprimées de groupes d'instances via l'API." -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "L'appartenance à un groupe d'instances isolées n'est sans doute pas gérée par l'API." -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "Les instances conteneurisées ne peuvent pas être gérées via l'API" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." msgstr "Le nom de groupe de l'instance Tower ne peut pas être modifié." -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "Seuls les identifiants Kubernetes peuvent être associés à un groupe d'instances" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "Le cas échéant, affiche le nom de champ du rôle ou de la relation qui a changé." -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "Le cas échéant, affiche le modèle sur lequel le rôle ou la relation a été défini." -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "Un récapitulatif des valeurs nouvelles et modifiées lorsqu'un objet est créé, mis à jour ou supprimé" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "Pour créer, mettre à jour et supprimer des événements, il s'agit du type d'objet qui a été affecté. Pour associer et dissocier des événements, il s'agit du type d'objet associé à ou dissocié de object2." -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "Laisser vide pour créer, mettre à jour et supprimer des événements. Pour associer et dissocier des événements, il s'agit du type d'objet auquel object1 est associé." -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "Action appliquée par rapport à l'objet ou aux objets donnés." @@ -1638,7 +1638,7 @@ msgstr "Exemple de paramètre" msgid "Example setting which can be different for each user." 
msgstr "Exemple de paramètre qui peut être différent pour chaque utilisateur." -#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "Utilisateur" @@ -1741,15 +1741,15 @@ msgstr "Système" msgid "OtherSystem" msgstr "Autre Système" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "Catégories de paramètre" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "Détails du paramètre" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "Journalisation du test de connectivité" @@ -2795,7 +2795,7 @@ msgstr "URL Conjur" msgid "API Key" msgstr "Clé API" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "Compte" @@ -2882,7 +2882,7 @@ msgid "" msgstr "Nom du backend secret (s'il est laissé vide, le premier segment du chemin secret sera utilisé)." #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "Nom de la clé" @@ -3259,7 +3259,7 @@ msgid "" "Management (IAM) users." msgstr "Le service de jeton de sécurité (STS) est un service Web qui permet de demander des informations d’identification provisoires avec des privilèges limités pour les utilisateurs d’AWS Identity and Access Management (IAM)." -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3300,7 +3300,7 @@ msgstr "Les domaines OpenStack définissent les limites administratives. Ils son msgid "Verify SSL" msgstr "Vérifier SSL" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3313,7 +3313,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." msgstr "Saisir le nom d’hôte ou l’adresse IP qui correspond à votre VMware vCenter." -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "Red Hat Satellite 6" @@ -3327,7 +3327,7 @@ msgid "" "example, https://satellite.example.org" msgstr "Veuillez saisir l’URL qui correspond à votre serveur Red Hat Satellite 6. Par exemple, https://satellite.example.org" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3341,7 +3341,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "Veuillez saisir l’URL de la machine virtuelle qui correspond à votre instance de CloudForm. Par exemple, https://cloudforms.example.org" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3370,7 +3370,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." 
msgstr "Collez le contenu du fichier PEM associé à l’adresse électronique du compte de service." -#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3408,7 +3408,7 @@ msgstr "Jeton d'accès personnel GitLab" msgid "This token needs to come from your profile settings in GitLab" msgstr "Ce jeton doit provenir de vos paramètres de profil dans GitLab" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "Red Hat Virtualization" @@ -3424,7 +3424,7 @@ msgstr "Fichier CA" msgid "Absolute file path to the CA file to use (optional)" msgstr "Chemin d'accès absolu vers le fichier CA à utiliser (en option)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3468,7 +3468,7 @@ msgstr "La source doit être une information d'identification externe" msgid "Input field must be defined on target credential (options are {})." msgstr "Le champ de saisie doit être défini sur des informations d'identification externes (les options sont {})." -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "Échec de l'hôte" @@ -3476,7 +3476,7 @@ msgstr "Échec de l'hôte" msgid "Host Started" msgstr "Hôte démarré" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "Hôte OK" @@ -3484,11 +3484,11 @@ msgstr "Hôte OK" msgid "Host Failure" msgstr "Échec de l'hôte" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "Hôte ignoré" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "Hôte inaccessible" @@ -3572,27 +3572,27 @@ msgstr "Scène démarrée" msgid "Playbook Complete" msgstr "Playbook terminé" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "Déboguer" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "Verbeux" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "Obsolète" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "Avertissement" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "Avertissement système" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "Erreur" @@ -3620,300 +3620,300 @@ msgid "" "this group" msgstr "Liste des cas de concordance exacte qui seront toujours assignés 
automatiquement à ce groupe." -#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "Les hôtes ont un lien direct vers cet inventaire." -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." msgstr "Hôtes pour inventaire générés avec la propriété host_filter." -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "inventaires" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "Organisation contenant cet inventaire." -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "Variables d'inventaire au format JSON ou YAML." -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Indicateur signalant si des hôtes de cet inventaire ont échoué." -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre total d'hôtes dans cet inventaire." -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre d'hôtes dans cet inventaire avec des échecs actifs." -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre total de groupes dans cet inventaire." -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Indicateur signalant si cet inventaire a des sources d’inventaire externes." -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "Nombre total de sources d'inventaire externes configurées dans cet inventaire." -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "Nombre total de sources d'inventaire externes en échec dans cet inventaire." -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "Genre d'inventaire représenté." -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "Filtre appliqué aux hôtes de cet inventaire." 
-#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "Informations d'identification à utiliser par les hôtes appartenant à cet inventaire lors de l'accès à l'API Red Hat Insights ." -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "Marqueur indiquant que cet inventaire est en cours de suppression." -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "N'a pas pu traiter les sous-ensembles en tant que spécification de découpage." -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "Le nombre de tranches doit être inférieur au nombre total de tranches." -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." msgstr "Le nombre de tranches doit être 1 ou valeur supérieure." -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "Attribution non autorisée pour un inventaire Smart" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "Le genre d'informations d'identification doit être 'insights'." -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "Cet hôte est-il en ligne et disponible pour exécuter des tâches ?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "Valeur utilisée par la source d'inventaire distante pour identifier l'hôte de façon unique" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "Variables d'hôte au format JSON ou YAML." -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "Sources d'inventaire qui ont créé ou modifié cet hôte." -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "Structure JSON arbitraire des faits ansible les plus récents, par hôte." -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." msgstr "Date et heure de la dernière modification apportée à ansible_facts." -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "Identifiant unique de l'hôte de Red Hat Insights." -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "Variables de groupe au format JSON ou YAML." -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "Hôtes associés directement à ce groupe." -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." 
msgstr "Sources d'inventaire qui ont créé ou modifié ce groupe." -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "Fichier, répertoire ou script" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "Provenance d'un projet" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "Script personnalisé" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "Variables de source d'inventaire au format JSON ou YAML." -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "Liste d'expressions de filtre séparées par des virgules (EC2 uniquement). Les hôtes sont importés lorsque l'UN des filtres correspondent." -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "Limiter automatiquement les groupes créés à partir de la source d'inventaire (EC2 uniquement)." -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." msgstr "Écraser les groupes locaux et les hôtes de la source d'inventaire distante." -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "Écraser les variables locales de la source d'inventaire distante." -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." msgstr "Délai écoulé (en secondes) avant que la tâche ne soit annulée." 
-#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "ID d'image" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "Zone de disponibilité" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "ID d'instance" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "État de l'instance" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "Plateforme " -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "Type d'instance" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "Région" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "Groupe de sécurité" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "Balises" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "Ne rien baliser" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "ID VPC" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "Les sources d'inventaire cloud (telles que %s) requièrent des informations d'identification pour le service cloud correspondant." -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "Les informations d'identification sont requises pour une source cloud." -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "Les identifiants de type machine, contrôle de la source, insights ou archivage sécurisé ne sont pas autorisés par les sources d'inventaire personnalisées." -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "Les identifiants de type insights ou archivage sécurisé ne sont pas autorisés pour les sources d'inventaire scm." -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "Région %(source)s non valide : %(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "Expression de filtre non valide : %(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "Choix de regroupement non valide : %(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." msgstr "Projet contenant le fichier d'inventaire utilisé comme source." -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." 
msgstr "On n'autorise pas plus d'une source d'inventaire basé SCM avec mise à jour pré-inventaire ou mise à jour projet." -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "Impossible de mettre à jour une source d'inventaire SCM lors du lancement si elle est définie pour se mettre à jour lors de l'actualisation du projet. À la place, configurez le projet source correspondant pour qu'il se mette à jour au moment du lancement." -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." msgstr "Impossible de définir chemin_source si pas du type SCM." -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "Les fichiers d'inventaire de cette mise à jour de projet ont été utilisés pour la mise à jour de l'inventaire." -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "Contenus des scripts d'inventaire" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "Organisation propriétaire de ce script d'inventaire." @@ -4012,28 +4012,28 @@ msgstr "Inventaire appliqué en tant qu'invite, en supposant que le modèle de t msgid "job host summaries" msgstr "récapitulatifs des hôtes pour la tâche" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "Supprimer les tâches plus anciennes qu'un certain nombre de jours" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "Supprimer les entrées du flux d'activité plus anciennes qu'un certain nombre de jours" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "Supprime les sessions de navigateur expirées dans la base de données" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "Supprime les jetons d'accès OAuth 2 et les jetons d’actualisation arrivés à expiration" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "Les variables {list_of_keys} ne sont pas autorisées pour les tâches système." -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "jours doit être un entier positif." @@ -4777,7 +4777,7 @@ msgstr "Aucun chemin de traitement des erreurs trouvé, flux de travail marqué msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." msgstr "Le nœud d'approbation {name} ({pk}) a expiré après {timeout} secondes." 
-#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "Environnement virtuel non valide sélectionné : {}" @@ -4814,53 +4814,53 @@ msgstr "Aucun chemin de traitement des erreurs pour le ou les nœuds de tâche d msgid "Unable to convert \"%s\" to boolean" msgstr "Impossible de convertir \"%s\" en booléen" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "Type de SCM \"%s\" non pris en charge" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "URL %s non valide" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "URL %s non prise en charge" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "Hôte \"%s\" non pris en charge pour le fichier ://URL" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "L'hôte est requis pour l'URL %s" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "Le nom d'utilisateur doit être \"git\" pour l'accès SSH à %s." -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." msgstr "Le nom d'utilisateur doit être \"hg\" pour l'accès SSH à %s." -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "Le type d'entrée ’{data_type}’ n'est pas un dictionnaire" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "Variables non compatibles avec la norme JSON (error : {json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/locale/ja/LC_MESSAGES/django.po b/awx/locale/ja/LC_MESSAGES/django.po index db72a7ec8f..a878a3d1ad 100644 --- a/awx/locale/ja/LC_MESSAGES/django.po +++ b/awx/locale/ja/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -528,7 +528,7 @@ msgstr "ジョブテンプレートインベントリーが見つからないか msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "不明です。ジョブは起動設定が保存される前に実行された可能性があります。" -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." 
msgstr "{} の使用はアドホックコマンドで禁止されています。" @@ -547,324 +547,324 @@ msgstr "指定された変数 {} には置き換えるデータベースの値 msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ は予約されたキーワードで、{} には使用できません。\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "ジョブを実行するにはプロジェクトが必要です。" -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." msgstr "プロジェクトの更新に失敗したため、実行するリビジョンがありません。" -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "このジョブテンプレートに関連付けられているインベントリーが削除されています。" -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "指定されたインベントリーが削除されています。" -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "複数の {} 認証情報を割り当てることができません。" -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "`{}`の種類の認証情報を割り当てることができません。" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." msgstr "置き換えなしで起動時に {} 認証情報を削除することはサポートされていません。指定された一覧には認証情報がありません: {}" -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "このワークフローに関連付けられているインベントリーが削除されています。" -#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "メッセージタイプ '{}' が無効です。'メッセージ' または 'ボディー' のいずれかに指定する必要があります。" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "'{}' の文字列が必要ですが、{} が見つかりました。 " -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "メッセージでは改行を追加できません ({} イベントに改行が含まれます)" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "'messages' フィールドには辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "イベント '{}' は無効です。'started'、'success'、'error' または 'workflow_approval' のいずれかでなければなりません。" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "イベント '{}' には辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "ワークフロー承認イベント '{}' が無効です。'running'、'approved'、'timed_out' または 'denied' のいずれかでなければなりません。" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "ワークフロー承認イベント '{}' には辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "メッセージ '{}' のレンダリングができません: {}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "フィールド '{}' が利用できません" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 
msgid "Security error due to field '{}'" msgstr "フィールド '{}' が原因のセキュリティーエラー" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "'{}' の Webhook のボディーは json 辞書でなければなりません。'{}' のタイプが見つかりました。" -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." msgstr "'{}' の Webhook ボディーは有効な json 辞書ではありません ({})。" -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "通知設定の必須フィールドがありません: notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "フィールド '{}' に値が指定されていません" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." msgstr "HTTP メソッドは 'POST' または 'PUT' のいずれかでなければなりません。" -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "通知設定の必須フィールドがありません: {}。" -#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "設定フィールド '{}' のタイプが正しくありません。{} が予期されました。" -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "通知ボディー" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "有効な DTSTART が rrule で必要です。値は DTSTART:YYYYMMDDTHHMMSSZ で開始する必要があります。" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART をネイティブの日時にすることができません。;TZINFO= or YYYYMMDDTHHMMSSZZ を指定します。" -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "複数の DTSTART はサポートされません。" -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "RRULE が rrule で必要です。" -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "複数の RRULE はサポートされません。" -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "INTERVAL が rrule で必要です。" -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "SECONDLY はサポートされません。" -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "複数の BYMONTHDAY はサポートされません。" -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "複数の BYMONTH はサポートされません。" -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "数字の接頭辞のある BYDAY はサポートされません。" -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "BYYEARDAY はサポートされません。" -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "BYWEEKNO はサポートされません。" -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE には COUNT と UNTIL の両方を含めることができません" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." 
msgstr "COUNT > 999 はサポートされません。" -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "rrule の構文解析で検証に失敗しました: {}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "インベントリーソースはクラウドリソースでなければなりません。" -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "手動プロジェクトにはスケジュールを設定できません。" -#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." msgstr "「update_on_project_update」が設定されたインベントリーソースはスケジュールできません。代わりのそのソースプロジェクト「{}」 をスケジュールします。" -#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "このインスタンスにターゲット設定されている実行中または待機状態のジョブの数" -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "このインスタンスをターゲットに設定するすべてのジョブの数" -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "このインスタンスグループにターゲット設定されている実行中または待機状態のジョブの数" -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "このインスタンスグループをターゲットに設定するすべてのジョブの数" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "インスタンスグループが他のグループを制御するかどうかを指定します。" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "このグループ内でインスタンスを分離させるかを指定します。分離されたグループには指定したコントローラーグループがあります。" -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "このグループ内でインスタンスをコンテナー化するかを指定します。コンテナー化したグループには、指定の OpenShift または Kubernetes クラスターが含まれます。" -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "ポリシーインスタンスの割合" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "新規インスタンスがオンラインになると、このグループに自動的に最小限割り当てられるインスタンスの割合を選択します。" -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "ポリシーインスタンスの最小値" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "新規インスタンスがオンラインになると、このグループに自動的に最小限割り当てられるインスタンス数を入力します。" -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "ポリシーインスタンスの一覧" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "このグループに割り当てられる完全一致のインスタンスの一覧" -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "重複するエントリー {}。" -#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." 
msgstr "{} は既存インスタンスの有効なホスト名ではありません。" -#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "分離されたインスタンスは、API 経由でインスタンスグループから追加したり、削除したりすることができません。" -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "分離されたインスタンスグループのメンバーシップは API で管理できません。" -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "コンテナー化されたインスタンスは API で管理されないことがあります" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." msgstr "Tower のインスタンスグループ名は変更できません。" -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "インスタンスグループに関連付けることができる Kubernetes 認証情報のみです" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "これがある場合には、変更された関係またはロールのフィールド名を表示します。" -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "これがある場合には、ロールまたは関係が定義されているモデルを表示します。" -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "オブジェクトの作成、更新または削除時の新規値および変更された値の概要" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "作成、更新、および削除イベントの場合、これは影響を受けたオブジェクトタイプになります。関連付けおよび関連付け解除イベントの場合、これは object2 に関連付けられたか、またはその関連付けが解除されたオブジェクトタイプになります。" -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "作成、更新、および削除イベントの場合は設定されません。関連付けおよび関連付け解除イベントの場合、これは object1 が関連付けられるオブジェクトタイプになります。" -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "指定されたオブジェクトについて実行されたアクション。" @@ -1638,7 +1638,7 @@ msgstr "設定例" msgid "Example setting which can be different for each user." 
msgstr "ユーザーごとに異なる設定例" -#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "ユーザー" @@ -1741,15 +1741,15 @@ msgstr "システム" msgid "OtherSystem" msgstr "他のシステム" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "設定カテゴリー" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "設定の詳細" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "ロギング接続テスト" @@ -2794,7 +2794,7 @@ msgstr "Conjur URL" msgid "API Key" msgstr "API キー" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "アカウント" @@ -2881,7 +2881,7 @@ msgid "" msgstr "KV シークレットバックエンド名 (空白の場合は、シークレットパスの最初のセグメントが使用されます)。" #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "キー名" @@ -3258,7 +3258,7 @@ msgid "" "Management (IAM) users." msgstr "セキュリティートークンサービス (STS) は、AWS Identity and Access Management (IAM) ユーザーの一時的な、権限の制限された認証情報を要求できる web サービスです。" -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3298,7 +3298,7 @@ msgstr "OpenStack ドメインは管理上の境界を定義します。これ msgid "Verify SSL" msgstr "SSL の検証" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3311,7 +3311,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." msgstr "VMware vCenter に対応するホスト名または IP アドレスを入力します。" -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "Red Hat Satellite 6" @@ -3325,7 +3325,7 @@ msgid "" "example, https://satellite.example.org" msgstr "Red Hat Satellite 6 Server に対応する URL を入力します (例: https://satellite.example.org)。" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3339,7 +3339,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "CloudForms インスタンスに対応する仮想マシンの URL を入力します (例: https://cloudforms.example.org)。" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3368,7 +3368,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." 
msgstr "サービスアカウントメールに関連付けられた PEM ファイルの内容を貼り付けます。" -#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3406,7 +3406,7 @@ msgstr "GitLab パーソナルアクセストークン" msgid "This token needs to come from your profile settings in GitLab" msgstr "このトークンは GitLab のプロファイル設定から取得する必要があります。" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "Red Hat Virtualization" @@ -3422,7 +3422,7 @@ msgstr "CA ファイル" msgid "Absolute file path to the CA file to use (optional)" msgstr "使用する CA ファイルへの絶対ファイルパス (オプション)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3466,7 +3466,7 @@ msgstr "ソースは、外部の認証情報でなければなりません。" msgid "Input field must be defined on target credential (options are {})." msgstr "入力フィールドは、ターゲットの認証情報 (オプションは {}) で定義する必要があります。" -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "ホストの失敗" @@ -3474,7 +3474,7 @@ msgstr "ホストの失敗" msgid "Host Started" msgstr "ホストの開始" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "ホスト OK" @@ -3482,11 +3482,11 @@ msgstr "ホスト OK" msgid "Host Failure" msgstr "ホストの失敗" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "ホストがスキップされました" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "ホストに到達できません" @@ -3570,27 +3570,27 @@ msgstr "プレイの開始" msgid "Playbook Complete" msgstr "Playbook の完了" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "デバッグ" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "詳細" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "非推奨" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "警告" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "システム警告" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "エラー" @@ -3618,300 +3618,300 @@ msgid "" "this group" msgstr "このグループに常に自動的に割り当てられる完全一致のインスタンスの一覧" -#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "ホストにはこのインベントリーへの直接のリンクがあります。" -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." 
msgstr "host_filter プロパティーを使用して生成されたインベントリーのホスト。" -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "インベントリー" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "このインベントリーを含む組織。" -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "JSON または YAML 形式のインベントリー変数。" -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーのホストが失敗したかどうかを示すフラグ。" -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーでの合計ホスト数。" -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーで障害が発生中のホスト数。" -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーでの合計グループ数。" -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーに外部のインベントリーソースがあるかどうかを示すフラグ。" -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "このインベントリー内で設定される外部インベントリーソースの合計数。" -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "エラーのあるこのインベントリー内の外部インベントリーソースの数。" -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "表示されているインベントリーの種類。" -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "このインべントリーのホストに適用されるフィルター。" -#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "Red Hat Insights API へのアクセス時にこのインベントリーに属するホストによって使用される認証情報。" -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "このインベントリーが削除されていることを示すフラグ。" -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "サブセットをスライスの詳細として解析できませんでした。" -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "スライス番号はスライスの合計数より小さくなければなりません。" -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." 
msgstr "スライス番号は 1 以上でなければなりません。" -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "割り当てはスマートインベントリーでは許可されません" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "認証情報の種類は「insights」である必要があります。" -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "このホストはオンラインで、ジョブを実行するために利用できますか?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "ホストを一意に識別するためにリモートインベントリーソースで使用される値" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "JSON または YAML 形式のホスト変数。" -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "このホストを作成または変更したインベントリーソース。" -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "ホスト別の最新 ansible_facts の任意の JSON 構造。" -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." msgstr "ansible_facts の最終変更日時。" -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "Red Hat Insights ホスト固有 ID。" -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "JSON または YAML 形式のグループ変数。" -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "このグループに直接関連付けられたホスト。" -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." msgstr "このグループを作成または変更したインベントリーソース。" -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "ファイル、ディレクトリーまたはスクリプト" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "ソース: プロジェクト" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "カスタムスクリプト" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "YAML または JSON 形式のインベントリーソース変数。" -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "カンマ区切りのフィルター式の一覧 (EC2 のみ) です。ホストは、フィルターのいずれかが一致する場合にインポートされます。" -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "インベントリーソースから自動的に作成されるグループを制限します (EC2 のみ)。" -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." 
msgstr "リモートインベントリーソースからのローカルグループおよびホストを上書きします。" -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "リモートインベントリーソースからのローカル変数を上書きします。" -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." msgstr "タスクが取り消される前の実行時間 (秒数)。" -#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "イメージ ID" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "アベイラビリティーゾーン" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "インスタンス ID" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "インスタンスの状態" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "プラットフォーム" -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "インスタンスタイプ" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "リージョン" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "セキュリティーグループ" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "タグ" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "タグ None" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "VPC ID" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "クラウドベースのインベントリーソース (%s など) には一致するクラウドサービスの認証情報が必要です。" -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "認証情報がクラウドソースに必要です。" -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "タイプがマシン、ソースコントロール、Insights および Vault の認証情報はカスタムインベントリーソースには許可されません。" -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "タイプが Insights および Vault の認証情報は SCM のインベントリーソースには許可されません。" -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "無効な %(source)s リージョン: %(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "無効なフィルター式: %(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "無効なグループ (選択による): %(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." 
msgstr "ソースとして使用されるインベントリーファイルが含まれるプロジェクト。" -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." msgstr "複数の SCM ベースのインベントリーソースについて、インベントリー別のプロジェクト更新時の更新は許可されません。" -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "プロジェクト更新時の更新に設定している場合、SCM ベースのインベントリーソースを更新できません。その代わりに起動時に更新するように対応するソースプロジェクトを設定します。" -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." msgstr "SCM タイプでない場合 source_path を設定できません。" -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "このプロジェクト更新のインベントリーファイルがインベントリー更新に使用されました。" -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "インベントリースクリプトの内容" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "このインベントリースクリプトを所有する組織" @@ -4010,28 +4010,28 @@ msgstr "インベントリーがプロンプトとして適用されると、ジ msgid "job host summaries" msgstr "ジョブホストの概要" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "特定の日数より前のジョブを削除" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "特定の日数より前のアクティビティーストリームのエントリーを削除" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "期限切れブラウザーセッションをデータベースから削除" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "期限切れの OAuth 2 アクセストークンを削除し、トークンを更新" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "システムジョブでは変数 {list_of_keys} を使用できません。" -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "日数は正の整数である必要があります。" @@ -4775,7 +4775,7 @@ msgstr "エラーの処理パスが見つかりません。ワークフローを msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." 
msgstr "承認ノード {name} ({pk}) は {timeout} 秒後に失効しました。" -#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "無効な仮想環境が選択されました: {}" @@ -4812,53 +4812,53 @@ msgstr "ワークフロージョブのノードにエラーハンドルパスが msgid "Unable to convert \"%s\" to boolean" msgstr "\"%s\" をブール値に変換できません" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "サポートされない SCM タイプ \"%s\"" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "無効な %s URL" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "サポートされない %s URL" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "file:// URL でサポートされないホスト \"%s\"" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "%s URL にはホストが必要です" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "%s への SSH アクセスではユーザー名を \"git\" にする必要があります。" -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." msgstr "%s への SSH アクセスではユーザー名を \"hg\" にする必要があります。" -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "入力タイプ `{data_type}` は辞書ではありません" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "変数には JSON 標準との互換性がありません (エラー: {json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/locale/zh/LC_MESSAGES/django.po b/awx/locale/zh/LC_MESSAGES/django.po index b615e5d02b..7827966e11 100644 --- a/awx/locale/zh/LC_MESSAGES/django.po +++ b/awx/locale/zh/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -527,7 +527,7 @@ msgstr "作业模板清单缺失或未定义。" msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "未知,在保存启动配置前作业可能已经运行。" -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." msgstr "{} 被禁止在临时命令中使用。" @@ -546,324 +546,324 @@ msgstr "提供的变量 {} 没有要替换的数据库值。" msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ 是一个保留的关键字,可能不能用于 {}\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "运行一个作业时需要一个项目。" -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." 
msgstr "由于项目更新失败,缺少运行的修订版本。" -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "与此作业模板关联的清单将被删除。" -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "提供的清单将被删除。" -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "无法分配多个 {} 凭证。" -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "无法分配类型为 `{}` 的凭证" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." msgstr "不支持在不替换的情况下在启动时删除 {} 凭证。提供的列表缺少凭证:{}。" -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "与此 Workflow 关联的清单将被删除。" -#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "消息类型 '{}' 无效,必须是 'message' 或 'body'" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "'{}' 的预期字符串,找到 {}," -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "消息不能包含新行(在 {} 事件中找到新行)" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "'messages' 字段的预期字典,找到 {}" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "事件 '{}' 无效,必须是 'started'、'success'、'error' 或 'workflow_approval' 之一" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "事件 '{}' 的预期字典,找到 {}" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "工作流批准事件 '{}' 无效,必须是 'running'、'approved'、'timed_out' 或 'denied' 之一。" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "工作流批准事件 '{}' 的预期字典,找到 {}" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "无法呈现消息 '{}':{}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "字段 '{}' 不可用" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 msgid "Security error due to field '{}'" msgstr "因为字段 '{}' 导致安全错误" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "'{}' 的 Webhook 正文应该是 json 字典。找到类型 '{}'。" -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." 
msgstr "'{}' 的 Webhook 正文不是有效的 json 字典 ({})。" -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "通知配置缺少所需字段:notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "没有为字段 '{}' 指定值" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." msgstr "HTTP 方法必须是 'POST' 或 'PUT'。" -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "通知配置缺少所需字段:{}。" -#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "配置字段 '{}' 类型错误,预期为 {}。" -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "通知正文" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "rrule 中需要有效的 DTSTART。值应该以 DTSTART:YYYMMDDTHHMMSSZ 开头" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART 不能是一个不带时区的日期时间。指定 ;TZINFO= 或 YYYMMDDTHHMMSSZ。" -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "不支持多个 DTSTART。" -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "rrule 中需要 RRULE。" -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "不支持多个 RRULE。" -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "rrule 需要 INTERVAL。" -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "不支持 SECONDLY。" -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "不支持多个 BYMONTHDAY。" -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "不支持多个 BYMONTH。" -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "不支持带有数字前缀的 BYDAY。" -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "不支持 BYYEARDAY。" -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "不支持 BYWEEKNO。" -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE 可能不包含 COUNT 和 UNTIL" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." msgstr "不支持 COUNT > 999。" -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "rrule 解析失败验证:{}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "清单源必须是云资源。" -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "手动项目不能有计划集。" -#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." 
msgstr "无法调度带有 `update_on_project_update` 的清单源。改为调度其源项目 `{}`。" -#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "处于运行状态或等待状态的针对此实例的作业计数" -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "所有针对此实例的作业计数" -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "处于运行状态或等待状态的针对此实例组的作业计数" -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "所有针对此实例组的作业计数" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "指明实例组是否控制任何其他组" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "指明此组中的实例是否被隔离。隔离的组具有指定的控制器组。" -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "指明此组中的实例是否容器化。容器化的组具有指定的 Openshift 或 Kubernetes 集群。" -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "策略实例百分比" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "新实例上线时将自动分配给此组的所有实例的最小百分比。" -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "策略实例最小值" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "新实例上线时自动分配给此组的静态最小实例数量。" -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "策略实例列表" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "将分配给此组的完全匹配实例的列表" -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "重复条目 {}。" -#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." msgstr "{} 不是现有实例的有效主机名。" -#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "可能无法通过 API 为实例组添加或删除隔离的实例。" -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "可能无法通过 API 管理隔离的实例组成员资格。" -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "可能无法通过 API 管理容器化实例" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." 
msgstr "可能不会更改 tower 实例组名称。" -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "只有 Kubernetes 凭证可以与实例组关联" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "存在时,显示更改的角色或关系的字段名称。" -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "存在时,显示定义角色或关系的模型。" -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "创建、更新或删除对象时新值和更改值的概述" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "对于创建、更新和删除事件,这是受影响的对象类型。对于关联和解除关联事件,这是与对象 2 关联或解除关联的对象类型。" -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "创建、更新和删除事件未填充。对于关联和解除关联事件,这是对象 1 要关联的对象类型。" -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "对给定对象执行的操作。" @@ -1637,7 +1637,7 @@ msgstr "设置示例" msgid "Example setting which can be different for each user." msgstr "每个用户之间可以各不相同的设置示例。" -#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "用户" @@ -1740,15 +1740,15 @@ msgstr "系统" msgid "OtherSystem" msgstr "OtherSystem" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "设置类别" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "设置详情" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "日志记录连接测试" @@ -2793,7 +2793,7 @@ msgstr "Conjur URL" msgid "API Key" msgstr "API 密钥" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "帐户" @@ -2880,7 +2880,7 @@ msgid "" msgstr "kv 机密后端的名称(如果留空,将使用机密路径的第一个分段)。" #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "密钥名称" @@ -3257,7 +3257,7 @@ msgid "" "Management (IAM) users." msgstr "安全令牌服务 (STS) 是一个 Web 服务,让您可以为 AWS 身份和访问管理 (IAM) 用户请求临时的有限权限凭证。" -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3297,7 +3297,7 @@ msgstr "OpenStack 域定义了管理边界。只有 Keystone v3 身份验证 URL msgid "Verify SSL" msgstr "验证 SSL" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3310,7 +3310,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." 
msgstr "输入与 VMware vCenter 对应的主机名或 IP 地址。" -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "红帽卫星 6" @@ -3324,7 +3324,7 @@ msgid "" "example, https://satellite.example.org" msgstr "输入与您的红帽卫星 6 服务器对应的 URL。例如:https://satellite.example.org" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3338,7 +3338,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "输入与您的 CloudForms 实例对应的虚拟机的 URL。例如:https://cloudforms.example.org" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3367,7 +3367,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." msgstr "粘贴与服务账户电子邮件关联的 PEM 文件的内容。" -#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3405,7 +3405,7 @@ msgstr "GitLab 个人访问令牌" msgid "This token needs to come from your profile settings in GitLab" msgstr "此令牌需要来自您在 GitLab 中的配置文件设置" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "红帽虚拟化" @@ -3421,7 +3421,7 @@ msgstr "CA 文件" msgid "Absolute file path to the CA file to use (optional)" msgstr "要使用的 CA 文件的绝对文件路径(可选)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3465,7 +3465,7 @@ msgstr "源必须是外部凭证" msgid "Input field must be defined on target credential (options are {})." 
msgstr "输入字段必须在目标凭证上定义(选项为 {})。" -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "主机故障" @@ -3473,7 +3473,7 @@ msgstr "主机故障" msgid "Host Started" msgstr "主机已启动" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "主机正常" @@ -3481,11 +3481,11 @@ msgstr "主机正常" msgid "Host Failure" msgstr "主机故障" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "主机已跳过" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "主机无法访问" @@ -3569,27 +3569,27 @@ msgstr "Play 已启动" msgid "Playbook Complete" msgstr "Playbook 完成" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "调试" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "详细" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "已弃用" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "警告" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "系统警告" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "错误" @@ -3617,300 +3617,300 @@ msgid "" "this group" msgstr "将始终自动分配给此组的完全匹配实例的列表" -#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "主机具有指向此清单的直接链接。" -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." msgstr "使用 host_filter 属性生成的清单的主机。" -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "清单" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "包含此清单的机构。" -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的清单变量。" -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "此字段已弃用,并将在以后的发行版本中删除。指示此清单中是否有任何主机故障的标记。" -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中的主机总数。" -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." 
msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中有活跃故障的主机数量。" -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中的总组数。" -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "此字段已弃用,并将在以后的发行版本中删除。表示此清单是否有任何外部清单源的标记。" -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "在此清单中配置的外部清单源总数。" -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "此清单中有故障的外部清单源数量。" -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "所代表的清单种类。" -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "将应用到此清单的主机的过滤器。" -#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "访问红帽 Insights API 时供属于此清单的主机使用的凭证。" -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "指示正在删除清单的标记。" -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "无法将子集作为分片规格来解析。" -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "分片数量必须小于分片总数。" -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." msgstr "分片数量必须为 1 或更高。" -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "智能清单不允许分配" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "凭证种类必须是 'inights'。" -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "此主机是否在线,并可用于运行作业?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "远程清单源用来唯一标识主机的值" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的主机变量。" -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "创建或修改此主机的清单源。" -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "每个主机最近的 ansible_facts 的任意 JSON 结构。" -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." 
msgstr "最后修改 ansible_facts 的日期和时间。" -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "红帽 Insights 主机唯一标识符。" -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的组变量。" -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "与此组直接关联的主机。" -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." msgstr "创建或修改此组的清单源。" -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "文件、目录或脚本" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "源于项目" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "自定义脚本" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "YAML 或 JSON 格式的清单源变量。" -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "以逗号分隔的过滤器表达式列表(仅限 EC2)。当任何过滤器匹配时会导入主机。" -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "限制从清单源自动创建的组(仅限 EC2)。" -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." msgstr "从远程清单源覆盖本地组和主机。" -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "从远程清单源覆盖本地变量。" -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." 
msgstr "取消任务前运行的时间(以秒为单位)。" -#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "镜像 ID" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "可用性区域" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "实例 ID" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "实例状态" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "平台" -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "实例类型" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "区域" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "安全组" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "标签" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "标签 None" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "VPC ID" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "基于云的清单源(如 %s)需要匹配的云服务的凭证。" -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "云源需要凭证。" -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "对于自定义清单源,不允许使用机器、源控制、insights 和 vault 类型的凭证。" -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "对于 scm 清单源,不允许使用 insights 和 vault 类型的凭证。" -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "无效的 %(source)s 区域:%(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "无效的过滤器表达式:%(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "选择的组无效:%(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." msgstr "包含用作源的清单文件的项目。" -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." msgstr "不允许多个基于 SCM 的清单源按清单在项目更新时更新。" -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "如果设置为在项目更新时更新,则无法在启动时更新基于 SCM 的清单源。应将对应的源项目配置为在启动时更新。" -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." 
msgstr "如果不是 SCM 类型,则无法设置 source_path。" -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "此项目更新中的清单文件用于清单更新。" -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "清单脚本内容" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "拥有此清单脚本的机构" @@ -4009,28 +4009,28 @@ msgstr "作为提示而应用的清单,假定作业模板提示提供清单" msgid "job host summaries" msgstr "作业主机摘要" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "删除超过特定天数的作业" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "删除比特定天数旧的活动流条目" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "从数据库中删除已过期的浏览器会话" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "删除已过期的 OAuth 2 访问令牌并刷新令牌" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "系统作业不允许使用变量 {list_of_keys}。" -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "天必须为正整数。" @@ -4774,7 +4774,7 @@ msgstr "未找到错误处理路径,将工作流标记为失败" msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." msgstr "批准节点 {name} ({pk}) 已在 {timeout} 秒后过期。" -#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "选择了无效的虚拟环境:{}" @@ -4811,53 +4811,53 @@ msgstr "工作流作业节点没有错误处理路径 []。工作流作业节点 msgid "Unable to convert \"%s\" to boolean" msgstr "无法将 \"%s\" 转换为布尔值" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "不受支持的 SCM 类型 \"%s\"" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "无效的 %s URL" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "不受支持的 %s URL" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "用于 file:// URL的主机 \"%s\" 不受支持" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "%s URL 需要主机" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "用户名必须是 \"git\" 以供 SSH 访问 %s。" -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." 
msgstr "用户名必须是 \"hg\" 以供 SSH 访问 %s。" -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "输入类型 `{data_type}` 不是字典" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "与 JSON 标准不兼容的变量(错误:{json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/ui/po/fr.po b/awx/ui/po/fr.po index 71d684fb90..59ed681256 100644 --- a/awx/ui/po/fr.po +++ b/awx/ui/po/fr.po @@ -4429,11 +4429,12 @@ msgstr "Remplacer les variables qui se trouvent dans azure_rm.ini et qui sont ut #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans cloudforms.ini et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir cloudforms.ini dans Ansible Collections github repo.\n" +" Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables" @@ -4442,20 +4443,20 @@ msgstr "Remplacer les variables qui se trouvent dans ec2.ini et qui sont utilis #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans foreman.ini et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view foreman.ini in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. 
Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir foreman.ini dans Ansible le référentiel github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans openstack.yml et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view openstack.yml in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir openstack.yml dans le référentiel Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" diff --git a/awx/ui/po/ja.po b/awx/ui/po/ja.po index 7e44693fb0..e292156170 100644 --- a/awx/ui/po/ja.po +++ b/awx/ui/po/ja.po @@ -4427,11 +4427,11 @@ msgstr "azure_rm.ini にあり、インベントリー更新スクリプトで #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "cloudforms.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで cloudforms.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" は Ansible Collections github リポジトリーで cloudforms.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. 
For a detailed description of these variables" @@ -4440,20 +4440,16 @@ msgstr "ec2.ini にあり、インベントリー更新スクリプトで使用 #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "foreman.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで foreman.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "foreman.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定 は Ansible Collections github リポジトリーで foreman.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "openstack.yml にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで openstack.yml を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "openstack.yml にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定 は Openstack github リポジトリーで openstack.yml を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用して 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" diff --git a/awx/ui/po/zh.po b/awx/ui/po/zh.po index 23e2bb39c6..92348c9c6e 100644 --- a/awx/ui/po/zh.po +++ b/awx/ui/po/zh.po @@ -4424,16 +4424,16 @@ msgstr "其他(云提供商)" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:316 msgid "Override variables found in azure_rm.ini and used by the inventory update script. For a detailed description of these variables" -msgstr "覆写 azure_rm.ini 中由清单更新脚本使用的变量。有关这些变量的详细描述" +msgstr "覆盖 azure_rm.ini 中由清单更新脚本使用的变量。有关这些变量的详细描述" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. 
Refer to the Ansible Tower documentation for example syntax." +msgstr "覆盖 cloudforms.ini 中由清单更新脚本使用的变量。一个变量配置示例包括在\n" " \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" Ansible github repo 的 cloudforms.ini 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按钮可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables" @@ -4442,20 +4442,19 @@ msgstr "覆写 ec2.ini 中由清单更新脚本使用的变量。有关这些变 #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "覆盖 foreman.ini 中由清单脚本使用的变量。一个变量配置示例包括在\n" " \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" Ansible github repo 的 foreman.ini 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按旧可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." 
+msgstr "覆盖 openstack.yml 中由清单脚本使用的变量。一个变量配置示例包括在 \n" +" Openstack github repo 的 openstack.yml 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按旧可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" From de82c613fcbf662ba38e5f82441251959e28f078 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Mon, 15 Jun 2020 20:54:14 -0400 Subject: [PATCH 133/494] Make tests pass with current versions of things --- Makefile | 7 +++-- awx/conf/tests/unit/test_registry.py | 7 +++-- awx/conf/tests/unit/test_settings.py | 7 +++-- .../tests/docs/test_swagger_generation.py | 12 ++++--- .../test_inventory_source_injectors.py | 31 ++++++++++--------- awx/main/tests/unit/test_fields.py | 2 +- awx/main/tests/unit/test_tasks.py | 4 +-- .../roles/image_build/templates/Dockerfile.j2 | 1 + pytest.ini | 6 +++- requirements/requirements.txt | 2 +- requirements/requirements_dev.txt | 5 ++- 11 files changed, 49 insertions(+), 35 deletions(-) diff --git a/Makefile b/Makefile index 3111686ace..dac56bc944 100644 --- a/Makefile +++ b/Makefile @@ -362,7 +362,7 @@ TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/ss # Run all API unit tests. test: - @if [ "$(VENV_BASE)" ]; then \ + if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi; \ PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider -n auto $(TEST_DIRS) @@ -377,10 +377,11 @@ COLLECTION_NAMESPACE ?= awx COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE) test_collection: - @if [ "$(VENV_BASE)" ]; then \ + rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt + if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi; \ - PYTHONPATH=$(PYTHONPATH):$(VENV_BASE)/awx/lib/python3.6/site-packages:/usr/lib/python3.6/site-packages py.test $(COLLECTION_TEST_DIRS) + py.test $(COLLECTION_TEST_DIRS) -v # The python path needs to be modified so that the tests can find Ansible within the container # First we will use anything expility set as PYTHONPATH # Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory) diff --git a/awx/conf/tests/unit/test_registry.py b/awx/conf/tests/unit/test_registry.py index c25ea00724..ea5c66375f 100644 --- a/awx/conf/tests/unit/test_registry.py +++ b/awx/conf/tests/unit/test_registry.py @@ -29,9 +29,10 @@ def reg(request): # as "defined in a settings file". 
This is analogous to manually # specifying a setting on the filesystem (e.g., in a local_settings.py in # development, or in /etc/tower/conf.d/.py) - defaults = request.node.get_marker('defined_in_file') - if defaults: - settings.configure(**defaults.kwargs) + for marker in request.node.own_markers: + if marker.name == 'defined_in_file': + settings.configure(**marker.kwargs) + settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry) diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py index a95cbe54f7..a8344e4bf4 100644 --- a/awx/conf/tests/unit/test_settings.py +++ b/awx/conf/tests/unit/test_settings.py @@ -41,13 +41,16 @@ def settings(request): cache = LocMemCache(str(uuid4()), {}) # make a new random cache each time settings = LazySettings() registry = SettingsRegistry(settings) + defaults = {} # @pytest.mark.defined_in_file can be used to mark specific setting values # as "defined in a settings file". This is analogous to manually # specifying a setting on the filesystem (e.g., in a local_settings.py in # development, or in /etc/tower/conf.d/.py) - in_file_marker = request.node.get_marker('defined_in_file') - defaults = in_file_marker.kwargs if in_file_marker else {} + for marker in request.node.own_markers: + if marker.name == 'defined_in_file': + defaults = marker.kwargs + defaults['DEFAULTS_SNAPSHOT'] = {} settings.configure(**defaults) settings._wrapped = SettingsWrapper(settings._wrapped, diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py index 59d1f6eece..5def85b3d3 100644 --- a/awx/main/tests/docs/test_swagger_generation.py +++ b/awx/main/tests/docs/test_swagger_generation.py @@ -50,8 +50,6 @@ class TestSwaggerGeneration(): data.update(response.accepted_renderer.get_customizations() or {}) data['host'] = None - if not pytest.config.getoption("--genschema"): - data['modified'] = datetime.datetime.utcnow().isoformat() data['schemes'] = ['https'] data['consumes'] = ['application/json'] @@ -79,10 +77,14 @@ class TestSwaggerGeneration(): data['paths'] = revised_paths self.__class__.JSON = data - def test_sanity(self, release): + def test_sanity(self, release, request): JSON = self.__class__.JSON JSON['info']['version'] = release + + if not request.config.getoption('--genschema'): + JSON['modified'] = datetime.datetime.utcnow().isoformat() + # Make some basic assertions about the rendered JSON so we can # be sure it doesn't break across DRF upgrades and view/serializer # changes. 
@@ -115,7 +117,7 @@ class TestSwaggerGeneration(): # hit a couple important endpoints so we always have example data get(path, user=admin, expect=200) - def test_autogen_response_examples(self, swagger_autogen): + def test_autogen_response_examples(self, swagger_autogen, request): for pattern, node in TestSwaggerGeneration.JSON['paths'].items(): pattern = pattern.replace('{id}', '[0-9]+') pattern = pattern.replace(r'{category_slug}', r'[a-zA-Z0-9\-]+') @@ -138,7 +140,7 @@ class TestSwaggerGeneration(): for param in node[method].get('parameters'): if param['in'] == 'body': node[method]['parameters'].remove(param) - if pytest.config.getoption("--genschema"): + if request.config.getoption("--genschema"): pytest.skip("In schema generator skipping swagger generator", allow_module_level=True) else: node[method].setdefault('parameters', []).append({ diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index 4e549e1be6..e46366f352 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -121,21 +121,24 @@ def credential_kind(source): @pytest.fixture -def fake_credential_factory(source): - ct = CredentialType.defaults[credential_kind(source)]() - ct.save() +def fake_credential_factory(): + def wrap(source): + ct = CredentialType.defaults[credential_kind(source)]() + ct.save() - inputs = {} - var_specs = {} # pivoted version of inputs - for element in ct.inputs.get('fields'): - var_specs[element['id']] = element - for var in var_specs.keys(): - inputs[var] = generate_fake_var(var_specs[var]) + inputs = {} + var_specs = {} # pivoted version of inputs + for element in ct.inputs.get('fields'): + var_specs[element['id']] = element + for var in var_specs.keys(): + inputs[var] = generate_fake_var(var_specs[var]) + + return Credential.objects.create( + credential_type=ct, + inputs=inputs + ) + return wrap - return Credential.objects.create( - credential_type=ct, - inputs=inputs - ) def read_content(private_data_dir, raw_env, inventory_update): @@ -248,7 +251,7 @@ def create_reference_data(source_dir, env, content): @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) @pytest.mark.parametrize('script_or_plugin', ['scripts', 'plugins']) -def test_inventory_update_injected_content(this_kind, script_or_plugin, inventory): +def test_inventory_update_injected_content(this_kind, script_or_plugin, inventory, fake_credential_factory): src_vars = dict(base_source_var='value_of_var') if this_kind in INI_TEST_VARS: src_vars.update(INI_TEST_VARS[this_kind]) diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py index 429ab6faa0..94d3eaab92 100644 --- a/awx/main/tests/unit/test_fields.py +++ b/awx/main/tests/unit/test_fields.py @@ -158,7 +158,7 @@ def test_cred_type_injectors_schema(injectors, valid): ) field = CredentialType._meta.get_field('injectors') if valid is False: - with pytest.raises(ValidationError, message="Injector was supposed to throw a validation error, data: {}".format(injectors)): + with pytest.raises(ValidationError): field.clean(injectors, type_) else: field.clean(injectors, type_) diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index bd3245871e..9a62e8112b 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -2373,7 +2373,7 @@ def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): ProjectUpdate 
= tasks.RunProjectUpdate() - with pytest.raises(OSError, message='dummy message'): + with pytest.raises(OSError): ProjectUpdate.acquire_lock(instance) assert logger.err.called_with("I/O error({0}) while trying to open lock file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) @@ -2399,7 +2399,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_ fcntl_lockf.side_effect = err ProjectUpdate = tasks.RunProjectUpdate() - with pytest.raises(IOError, message='dummy message'): + with pytest.raises(IOError): ProjectUpdate.acquire_lock(instance) os_close.assert_called_with(3) assert logger.err.called_with("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) diff --git a/installer/roles/image_build/templates/Dockerfile.j2 b/installer/roles/image_build/templates/Dockerfile.j2 index dcab14bed9..0b0c5962bd 100644 --- a/installer/roles/image_build/templates/Dockerfile.j2 +++ b/installer/roles/image_build/templates/Dockerfile.j2 @@ -213,6 +213,7 @@ RUN chmod u+s /usr/bin/bwrap ; \ {% if build_dev|bool %} RUN for dir in \ /venv \ + /venv/awx/lib/python3.6 \ /var/lib/awx/projects \ /var/lib/awx/rsyslog \ /var/run/awx-rsyslog \ diff --git a/pytest.ini b/pytest.ini index ecdcd74b9d..ff89dc85f3 100644 --- a/pytest.ini +++ b/pytest.ini @@ -8,4 +8,8 @@ markers = ac: access control test survey: tests related to survey feature inventory_import: tests of code used by inventory import command -junit_family=xunit2 \ No newline at end of file + defined_in_file: + job_permissions: + activity_stream_access: + job_runtime_vars: +junit_family=xunit2 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 9930111107..31a736e083 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -60,7 +60,7 @@ jaraco.functools==3.0.0 # via irc, jaraco.text, tempora jaraco.logging==3.0.0 # via irc jaraco.stream==3.0.0 # via irc jaraco.text==3.2.0 # via irc, jaraco.collections -jinja2==2.11.1 # via -r /awx_devel/requirements/requirements.in, openshift +jinja2==2.11.2 # via -r /awx_devel/requirements/requirements.in, openshift jsonschema==3.2.0 # via -r /awx_devel/requirements/requirements.in kubernetes==11.0.0 # via openshift lockfile==0.12.2 # via python-daemon diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index fa73f78c78..6fd0b2ed2f 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -5,15 +5,14 @@ ipython==5.2.1 unittest2 pep8 flake8 -pluggy==0.6.0 pyflakes -pytest==3.6.0 +pytest pytest-cov pytest-django pytest-pythonpath pytest-mock==1.11.1 pytest-timeout -pytest-xdist<1.28.0 +pytest-xdist tox # for awxkit logutils jupyter From 4c55685656342818f46287f7bf025e6a743b4eae Mon Sep 17 00:00:00 2001 From: Tom Page Date: Thu, 11 Jun 2020 17:52:30 +0100 Subject: [PATCH 134/494] Add tower_credential_input_source to awx_collection Signed-off-by: Tom Page --- .../modules/tower_credential_input_source.py | 134 +++++++++ .../test/awx/test_credential_input_source.py | 268 ++++++++++++++++++ .../tasks/main.yml | 74 +++++ 3 files changed, 476 insertions(+) create mode 100644 awx_collection/plugins/modules/tower_credential_input_source.py create mode 100644 awx_collection/test/awx/test_credential_input_source.py create mode 100644 awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py 
b/awx_collection/plugins/modules/tower_credential_input_source.py new file mode 100644 index 0000000000..d6245911ec --- /dev/null +++ b/awx_collection/plugins/modules/tower_credential_input_source.py @@ -0,0 +1,134 @@ +#!/usr/bin/python +# coding: utf-8 -*- + +# Copyright: (c) 2017, Wayne Witzel III +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + + +DOCUMENTATION = ''' +--- +module: tower_credential_input_source +author: "Tom Page (@Tompage1994)" +version_added: "2.3" +short_description: create, update, or destroy Ansible Tower credential input sources. +description: + - Create, update, or destroy Ansible Tower credential input sources. See + U(https://www.ansible.com/tower) for an overview. +options: + description: + description: + - The description to use for the credential input source. + type: str + input_field_name: + description: + - The input field the credential source will be used for + required: True + type: str + metadata: + description: + - A JSON or YAML string + required: False + type: str + target_credential: + description: + - The credential which will have its input defined by this source + required: true + type: str + source_credential: + description: + - The credential which is the source of the credential lookup + required: true + type: str + state: + description: + - Desired state of the resource. + choices: ["present", "absent"] + default: "present" + type: str + +extends_documentation_fragment: awx.awx.auth +''' + + +EXAMPLES = ''' +- name: Use CyberArk Lookup credential as password source + tower_credential_input_source: + input_field_name: password + target_credential: new_cred + source_credential: cyberark_lookup + metadata: + object_query: "Safe=MY_SAFE;Object=awxuser" + object_query_format: "Exact" + state: present + +''' + +from ..module_utils.tower_api import TowerModule + +def main(): + # Any additional arguments that are not fields of the item can be added here + argument_spec = dict( + description=dict(default=''), + input_field_name=dict(required=True), + target_credential=dict(required=True), + source_credential=dict(required=True), + metadata=dict(type=dict), + state=dict(choices=['present', 'absent'], default='present'), + ) + + # Create a module for ourselves + module = TowerModule(argument_spec=argument_spec) + + # Extract our parameters + description = module.params.get('description') + input_field_name = module.params.get('input_field_name') + target_credential = module.params.get('target_credential') + source_credential = module.params.get('source_credential') + metadata = module.params.get('metadata') + state = module.params.get('state') + + target_credential_id = module.resolve_name_to_id('credentials', target_credential) + source_credential_id = module.resolve_name_to_id('credentials', source_credential) + + # Attempt to look up the object based on the provided name, credential type and optional organization + lookup_data = { + 'target_credential': target_credential_id, + 'source_credential': source_credential_id, + 'input_field_name': input_field_name, + } + + credential_input_source = module.get_one('credential_input_sources', **{'data': lookup_data}) + + if state == 'absent': + # If the state was absent we can let the module delete it if needed, the module will handle exiting from this + if 
credential_input_source: + credential_input_source['name'] = '' + module.delete_if_needed(credential_input_source) + + # Create the data that gets sent for create and update + credential_input_source_fields = { + 'target_credential': target_credential_id, + 'source_credential': source_credential_id, + 'input_field_name': input_field_name, + } + if metadata: + credential_input_source_fields['metadata'] = metadata + if description: + credential_input_source_fields['description'] = description + + # If the state was present we can let the module build or update the existing group, this will return on its own + module.create_or_update_if_needed( + credential_input_source, credential_input_source_fields, endpoint='credential_input_sources', item_type='credential_input_source' + ) + + +if __name__ == '__main__': + main() diff --git a/awx_collection/test/awx/test_credential_input_source.py b/awx_collection/test/awx/test_credential_input_source.py new file mode 100644 index 0000000000..4c5cf84e8f --- /dev/null +++ b/awx_collection/test/awx/test_credential_input_source.py @@ -0,0 +1,268 @@ +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import pytest + +from awx.main.models import CredentialInputSource, Credential, CredentialType, Organization + + +# Test CyberArk AIM credential source +@pytest.fixture +def source_cred_aim(organization): + # Make a credential type which will be used by the credential + ct=CredentialType.defaults['aim']() + ct.save() + return Credential.objects.create( + name='CyberArk AIM Cred', + credential_type=ct, + inputs={ + "url": "https://cyberark.example.com", + "app_id": "myAppID", + "verify": "false" + } + ) + + +@pytest.mark.django_db +def test_aim_credential_source(run_module, admin_user, organization, silence_deprecation): + src_cred = source_cred_aim(organization) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['object_query'] == "Safe=SUPERSAFE;Object=MyAccount" + assert cis.source_credential.name == src_cred.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' + assert result['id'] == cis.pk + + +# Test CyberArk Conjur credential source +@pytest.fixture +def source_cred_conjur(organization): + # Make a credential type which will be used by the credential + ct=CredentialType.defaults['conjur']() + ct.save() + return Credential.objects.create( + name='CyberArk CONJUR Cred', + credential_type=ct, + inputs={ + "url": "https://cyberark.example.com", + "api_key": "myApiKey", + "account": "account", + "username": "username" + } + ) + + +@pytest.mark.django_db +def test_conjur_credential_source(run_module, admin_user, organization, silence_deprecation): + src_cred = source_cred_conjur(organization) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + 
organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"secret_path": "/path/to/secret"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['secret_path'] == "/path/to/secret" + assert cis.source_credential.name == src_cred.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' + assert result['id'] == cis.pk + + +# Test Hashicorp Vault secret credential source +@pytest.fixture +def source_cred_hashi_secret(organization): + # Make a credential type which will be used by the credential + ct=CredentialType.defaults['hashivault_kv']() + ct.save() + return Credential.objects.create( + name='HashiCorp secret Cred', + credential_type=ct, + inputs={ + "url": "https://secret.hash.example.com", + "token": "myApiKey", + "role_id": "role", + "secret_id": "secret" + } + ) + + +@pytest.mark.django_db +def test_hashi_secret_credential_source(run_module, admin_user, organization, silence_deprecation): + src_cred = source_cred_hashi_secret(organization) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "secret_backend": "backend", "secret_key": "a_key"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['secret_path'] == "/path/to/secret" + assert cis.metadata['auth_path'] == "/path/to/auth" + assert cis.metadata['secret_backend'] == "backend" + assert cis.metadata['secret_key'] == "a_key" + assert cis.source_credential.name == src_cred.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' + assert result['id'] == cis.pk + + +# Test Hashicorp Vault signed ssh credential source +@pytest.fixture +def source_cred_hashi_ssh(organization): + # Make a credential type which will be used by the credential + ct=CredentialType.defaults['hashivault_ssh']() + ct.save() + return Credential.objects.create( + name='HashiCorp ssh Cred', + credential_type=ct, + inputs={ + "url": "https://ssh.hash.example.com", + "token": "myApiKey", + "role_id": "role", + "secret_id": "secret" + } + ) + + +@pytest.mark.django_db +def test_hashi_ssh_credential_source(run_module, admin_user, organization, silence_deprecation): + src_cred = source_cred_hashi_ssh(organization) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + 
input_field_name='password', + metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "role": "role", "public_key": "a_key", "valid_principals": "some_value"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['secret_path'] == "/path/to/secret" + assert cis.metadata['auth_path'] == "/path/to/auth" + assert cis.metadata['role'] == "role" + assert cis.metadata['public_key'] == "a_key" + assert cis.metadata['valid_principals'] == "some_value" + assert cis.source_credential.name == src_cred.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' + assert result['id'] == cis.pk + + +# Test Azure Key Vault credential source +@pytest.fixture +def source_cred_azure_kv(organization): + # Make a credential type which will be used by the credential + ct=CredentialType.defaults['azure_kv']() + ct.save() + return Credential.objects.create( + name='Azure KV Cred', + credential_type=ct, + inputs={ + "url": "https://key.azure.example.com", + "client": "client", + "secret": "secret", + "tenant": "tenant", + "cloud_name": "the_cloud", + } + ) + + +@pytest.mark.django_db +def test_azure_kv_credential_source(run_module, admin_user, organization, silence_deprecation): + src_cred = source_cred_azure_kv(organization) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"secret_field": "my_pass"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['secret_field'] == "my_pass" + assert cis.source_credential.name == src_cred.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' + assert result['id'] == cis.pk diff --git a/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml b/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml new file mode 100644 index 0000000000..9e17847818 --- /dev/null +++ b/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml @@ -0,0 +1,74 @@ +--- +- name: Generate names + set_fact: + src_cred_name: src_cred + target_cred_name: target_cred + +- name: Add Tower credential Lookup + tower_credential: + description: Credential for Testing Source + name: "{{ src_cred_name }}" + credential_type: CyberArk AIM Central Credential Provider Lookup + inputs: + url: "https://cyberark.example.com" + app_id: "My-App-ID" + organization: Default + register: result + +- assert: + that: + - "result is changed" + +- name: Add Tower credential Target + tower_credential: + description: Credential for Testing Target + name: "{{ target_cred_name }}" + credential_type: Machine + inputs: + username: user + organization: Default + register: result + +- assert: + that: + - "result is changed" + +- name: Add credential Input Source + 
tower_credential_input_source: + input_field_name: password + target_credential: "{{ target_cred_name }}" + source_credential: "{{ src_cred_name }}" + metadata: + object_query: "Safe=MY_SAFE;Object=AWX-user" + object_query_format: "Exact" + state: present + +- assert: + that: + - "result is changed" + +- name: Remove a Tower credential type + tower_credential_input_source: + input_field_name: password + target_credential: "{{ target_cred_name }}" + source_credential: "{{ src_cred_name }}" + state: absent + register: result + +- assert: + that: + - "result is changed" + +- name: Remove Tower credential Lookup + tower_credential: + name: "{{ src_cred_name }}" + organization: Default + state: absent + register: result + +- name: Remove Tower credential Lookup + tower_credential: + name: "{{ target_cred_name }}" + organization: Default + state: absent + register: result From ea175ac5b64c3f8a0750e2996cf5e0479e267562 Mon Sep 17 00:00:00 2001 From: Ben Schmitz Date: Wed, 6 May 2020 16:14:25 -0500 Subject: [PATCH 135/494] Allows for docker logger support --- installer/inventory | 7 +++++++ .../local_docker/templates/docker-compose.yml.j2 | 12 ++++++++++++ 2 files changed, 19 insertions(+) diff --git a/installer/inventory b/installer/inventory index 0c31c78012..85a875eb0a 100644 --- a/installer/inventory +++ b/installer/inventory @@ -155,3 +155,10 @@ secret_key=awxsecret # Docker compose explicit subnet. Set to avoid overlapping your existing LAN networks. #docker_compose_subnet="172.17.0.1/16" +# +# Allow for different docker logging drivers +# By Default; the logger will be json-file, however you can override +# that by uncommenting the docker_logger below. +# Be aware that journald may rate limit your log messages if you choose it. +# See: https://docs.docker.com/config/containers/logging/configure/ +# docker_logger=journald diff --git a/installer/roles/local_docker/templates/docker-compose.yml.j2 b/installer/roles/local_docker/templates/docker-compose.yml.j2 index eaa166a0ab..aab318ec77 100644 --- a/installer/roles/local_docker/templates/docker-compose.yml.j2 +++ b/installer/roles/local_docker/templates/docker-compose.yml.j2 @@ -68,6 +68,10 @@ services: http_proxy: {{ http_proxy | default('') }} https_proxy: {{ https_proxy | default('') }} no_proxy: {{ no_proxy | default('') }} + {% if docker_logger is defined %} + logging: + driver: {{ docker_logger }} + {% endif %} task: image: {{ awx_docker_actual_image }} @@ -138,6 +142,10 @@ services: volumes: - "{{ docker_compose_dir }}/redis.conf:/usr/local/etc/redis/redis.conf:ro" - "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw" + {% if docker_logger is defined %} + logging: + driver: {{ docker_logger }} + {% endif %} {% if pg_hostname is not defined %} postgres: @@ -154,6 +162,10 @@ services: http_proxy: {{ http_proxy | default('') }} https_proxy: {{ https_proxy | default('') }} no_proxy: {{ no_proxy | default('') }} + {% if docker_logger is defined %} + logging: + driver: {{ docker_logger }} + {% endif %} {% endif %} {% if docker_compose_subnet is defined %} From 1c78190385db53b18fd70253c246410a031e2d4f Mon Sep 17 00:00:00 2001 From: Tom Page Date: Tue, 16 Jun 2020 13:56:49 +0100 Subject: [PATCH 136/494] Change cred_input_src to remove src_cred as primarykey Signed-off-by: Tom Page --- .../plugins/module_utils/tower_api.py | 5 ++ .../modules/tower_credential_input_source.py | 17 ++-- .../test/awx/test_credential_input_source.py | 83 +++++++++++++++++-- .../tasks/main.yml | 37 ++++++++- 4 files changed, 123 insertions(+), 19 deletions(-) 
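For readers wiring up the docker_logger option added in the local_docker patch above, the two pieces fit together as follows — a minimal sketch, assuming the journald driver that the inventory comment itself suggests (any other logging driver Docker supports works the same way):

    # installer/inventory -- uncomment to override the default json-file driver
    docker_logger=journald

    # block rendered into the affected services of docker-compose.yml
    # once docker_logger is defined (otherwise the {% if %} guard drops it):
    logging:
      driver: journald

Leaving docker_logger undefined keeps the rendered compose file unchanged, so existing installs stay on Docker's json-file default.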
diff --git a/awx_collection/plugins/module_utils/tower_api.py b/awx_collection/plugins/module_utils/tower_api.py index d836c457c0..a0b8b789ff 100644 --- a/awx_collection/plugins/module_utils/tower_api.py +++ b/awx_collection/plugins/module_utils/tower_api.py @@ -521,6 +521,9 @@ class TowerModule(AnsibleModule): elif item_type == 'o_auth2_access_token': # An oauth2 token has no name, instead we will use its id for any of the messages item_name = existing_item['id'] + elif item_type == 'credential_input_source': + # An credential_input_source has no name, instead we will use its id for any of the messages + item_name = existing_item['id'] else: self.fail_json(msg="Unable to process delete of {0} due to missing name".format(item_type)) @@ -691,6 +694,8 @@ class TowerModule(AnsibleModule): item_name = existing_item['username'] elif item_type == 'workflow_job_template_node': item_name = existing_item['identifier'] + elif item_type == 'credential_input_source': + item_name = existing_item['id'] else: item_name = existing_item['name'] item_id = existing_item['id'] diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py index d6245911ec..ffaae2642f 100644 --- a/awx_collection/plugins/modules/tower_credential_input_source.py +++ b/awx_collection/plugins/modules/tower_credential_input_source.py @@ -1,7 +1,7 @@ #!/usr/bin/python # coding: utf-8 -*- -# Copyright: (c) 2017, Wayne Witzel III +# Copyright: (c) 2020, Tom Page # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function @@ -45,7 +45,6 @@ options: source_credential: description: - The credential which is the source of the credential lookup - required: true type: str state: description: @@ -79,7 +78,7 @@ def main(): description=dict(default=''), input_field_name=dict(required=True), target_credential=dict(required=True), - source_credential=dict(required=True), + source_credential=dict(default=''), metadata=dict(type=dict), state=dict(choices=['present', 'absent'], default='present'), ) @@ -96,29 +95,25 @@ def main(): state = module.params.get('state') target_credential_id = module.resolve_name_to_id('credentials', target_credential) - source_credential_id = module.resolve_name_to_id('credentials', source_credential) - # Attempt to look up the object based on the provided name, credential type and optional organization + # Attempt to look up the object based on the target credential and input field lookup_data = { 'target_credential': target_credential_id, - 'source_credential': source_credential_id, 'input_field_name': input_field_name, } - + module.json_output['all'] = module.get_all_endpoint('credential_input_sources', **{'data': {}}) credential_input_source = module.get_one('credential_input_sources', **{'data': lookup_data}) if state == 'absent': - # If the state was absent we can let the module delete it if needed, the module will handle exiting from this - if credential_input_source: - credential_input_source['name'] = '' module.delete_if_needed(credential_input_source) # Create the data that gets sent for create and update credential_input_source_fields = { 'target_credential': target_credential_id, - 'source_credential': source_credential_id, 'input_field_name': input_field_name, } + if source_credential: + credential_input_source_fields['source_credential'] = module.resolve_name_to_id('credentials', source_credential) if metadata: 
credential_input_source_fields['metadata'] = metadata if description: diff --git a/awx_collection/test/awx/test_credential_input_source.py b/awx_collection/test/awx/test_credential_input_source.py index 4c5cf84e8f..7c6a46093a 100644 --- a/awx_collection/test/awx/test_credential_input_source.py +++ b/awx_collection/test/awx/test_credential_input_source.py @@ -5,13 +5,16 @@ import pytest from awx.main.models import CredentialInputSource, Credential, CredentialType, Organization +@pytest.fixture +def get_aim_cred_type(): + ct=CredentialType.defaults['aim']() + ct.save() + return ct + # Test CyberArk AIM credential source @pytest.fixture -def source_cred_aim(organization): - # Make a credential type which will be used by the credential - ct=CredentialType.defaults['aim']() - ct.save() +def source_cred_aim(ct): return Credential.objects.create( name='CyberArk AIM Cred', credential_type=ct, @@ -25,7 +28,8 @@ def source_cred_aim(organization): @pytest.mark.django_db def test_aim_credential_source(run_module, admin_user, organization, silence_deprecation): - src_cred = source_cred_aim(organization) + cred_type = get_aim_cred_type() + src_cred = source_cred_aim(cred_type) ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -266,3 +270,72 @@ def test_azure_kv_credential_source(run_module, admin_user, organization, silenc assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk + + +# Test Changing Credential Source +@pytest.fixture +def source_cred_aim_alt(ct): + return Credential.objects.create( + name='Alternate CyberArk AIM Cred', + credential_type=ct, + inputs={ + "url": "https://cyberark-alt.example.com", + "app_id": "myAltID", + "verify": "false" + } + ) + +@pytest.mark.django_db +def test_aim_credential_source(run_module, admin_user, organization, silence_deprecation): + cred_type=get_aim_cred_type() + src_cred = source_cred_aim(cred_type) + ct=CredentialType.defaults['ssh']() + ct.save() + tgt_cred = Credential.objects.create( + name='Test Machine Credential', + organization=organization, + credential_type=ct, + inputs={'username': 'bob'} + ) + + result = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, + state='present' + ), admin_user) + + assert not result.get('failed', False), result.get('msg', result) + assert result.get('changed'), result + + unchangedResult = run_module('tower_credential_input_source', dict( + source_credential=src_cred.name, + target_credential=tgt_cred.name, + input_field_name='password', + metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, + state='present' + ), admin_user) + + assert not unchangedResult.get('failed', False), result.get('msg', result) + assert not unchangedResult.get('changed'), result + + src_cred_alt = source_cred_aim_alt(cred_type) + + changedResult = run_module('tower_credential_input_source', dict( + source_credential=src_cred_alt.name, + target_credential=tgt_cred.name, + input_field_name='password', + state='present' + ), admin_user) + + assert not changedResult.get('failed', False), changedResult.get('msg', result) + assert changedResult.get('changed'), result + + assert CredentialInputSource.objects.count() == 1 + cis = CredentialInputSource.objects.first() + + assert cis.metadata['object_query'] == "Safe=SUPERSAFE;Object=MyAccount" + assert 
cis.source_credential.name == src_cred_alt.name + assert cis.target_credential.name == tgt_cred.name + assert cis.input_field_name == 'password' diff --git a/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml b/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml index 9e17847818..45be47ddf7 100644 --- a/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml +++ b/awx_collection/tests/integration/targets/tower_credential_input_source/tasks/main.yml @@ -47,11 +47,32 @@ that: - "result is changed" -- name: Remove a Tower credential type +- name: Add Second Tower credential Lookup + tower_credential: + description: Credential for Testing Source Change + name: "{{ src_cred_name }}-2" + credential_type: CyberArk AIM Central Credential Provider Lookup + inputs: + url: "https://cyberark-prod.example.com" + app_id: "My-App-ID" + organization: Default + register: result + +- name: Change credential Input Source + tower_credential_input_source: + input_field_name: password + target_credential: "{{ target_cred_name }}" + source_credential: "{{ src_cred_name }}-2" + state: present + +- assert: + that: + - "result is changed" + +- name: Remove a Tower credential source tower_credential_input_source: input_field_name: password target_credential: "{{ target_cred_name }}" - source_credential: "{{ src_cred_name }}" state: absent register: result @@ -63,12 +84,22 @@ tower_credential: name: "{{ src_cred_name }}" organization: Default + credential_type: CyberArk AIM Central Credential Provider Lookup state: absent register: result -- name: Remove Tower credential Lookup +- name: Remove Alt Tower credential Lookup + tower_credential: + name: "{{ src_cred_name }}-2" + organization: Default + credential_type: CyberArk AIM Central Credential Provider Lookup + state: absent + register: result + +- name: Remove Tower credential tower_credential: name: "{{ target_cred_name }}" organization: Default + credential_type: Machine state: absent register: result From 73a39c1e5501416c6d80c81ba7e95097ff6b51d9 Mon Sep 17 00:00:00 2001 From: Tom Page Date: Tue, 16 Jun 2020 14:30:28 +0100 Subject: [PATCH 137/494] Cleanup of debug and change test fixtures Signed-off-by: Tom Page --- .../modules/tower_credential_input_source.py | 1 - .../test/awx/test_credential_input_source.py | 60 ++++++++----------- 2 files changed, 25 insertions(+), 36 deletions(-) diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py index ffaae2642f..8e81d50569 100644 --- a/awx_collection/plugins/modules/tower_credential_input_source.py +++ b/awx_collection/plugins/modules/tower_credential_input_source.py @@ -101,7 +101,6 @@ def main(): 'target_credential': target_credential_id, 'input_field_name': input_field_name, } - module.json_output['all'] = module.get_all_endpoint('credential_input_sources', **{'data': {}}) credential_input_source = module.get_one('credential_input_sources', **{'data': lookup_data}) if state == 'absent': diff --git a/awx_collection/test/awx/test_credential_input_source.py b/awx_collection/test/awx/test_credential_input_source.py index 7c6a46093a..15773791da 100644 --- a/awx_collection/test/awx/test_credential_input_source.py +++ b/awx_collection/test/awx/test_credential_input_source.py @@ -6,7 +6,7 @@ import pytest from awx.main.models import CredentialInputSource, Credential, CredentialType, Organization @pytest.fixture -def get_aim_cred_type(): 
+def aim_cred_type(): ct=CredentialType.defaults['aim']() ct.save() return ct @@ -14,10 +14,10 @@ def get_aim_cred_type(): # Test CyberArk AIM credential source @pytest.fixture -def source_cred_aim(ct): +def source_cred_aim(aim_cred_type): return Credential.objects.create( name='CyberArk AIM Cred', - credential_type=ct, + credential_type=aim_cred_type, inputs={ "url": "https://cyberark.example.com", "app_id": "myAppID", @@ -27,9 +27,7 @@ def source_cred_aim(ct): @pytest.mark.django_db -def test_aim_credential_source(run_module, admin_user, organization, silence_deprecation): - cred_type = get_aim_cred_type() - src_cred = source_cred_aim(cred_type) +def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -40,7 +38,7 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_aim.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, @@ -54,7 +52,7 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep cis = CredentialInputSource.objects.first() assert cis.metadata['object_query'] == "Safe=SUPERSAFE;Object=MyAccount" - assert cis.source_credential.name == src_cred.name + assert cis.source_credential.name == source_cred_aim.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk @@ -79,8 +77,7 @@ def source_cred_conjur(organization): @pytest.mark.django_db -def test_conjur_credential_source(run_module, admin_user, organization, silence_deprecation): - src_cred = source_cred_conjur(organization) +def test_conjur_credential_source(run_module, admin_user, organization, source_cred_conjur, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -91,7 +88,7 @@ def test_conjur_credential_source(run_module, admin_user, organization, silence_ ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_conjur.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"secret_path": "/path/to/secret"}, @@ -105,7 +102,7 @@ def test_conjur_credential_source(run_module, admin_user, organization, silence_ cis = CredentialInputSource.objects.first() assert cis.metadata['secret_path'] == "/path/to/secret" - assert cis.source_credential.name == src_cred.name + assert cis.source_credential.name == source_cred_conjur.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk @@ -130,8 +127,7 @@ def source_cred_hashi_secret(organization): @pytest.mark.django_db -def test_hashi_secret_credential_source(run_module, admin_user, organization, silence_deprecation): - src_cred = source_cred_hashi_secret(organization) +def test_hashi_secret_credential_source(run_module, admin_user, organization, source_cred_hashi_secret, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -142,7 +138,7 @@ def test_hashi_secret_credential_source(run_module, admin_user, organization, si ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + 
source_credential=source_cred_hashi_secret.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "secret_backend": "backend", "secret_key": "a_key"}, @@ -159,7 +155,7 @@ def test_hashi_secret_credential_source(run_module, admin_user, organization, si assert cis.metadata['auth_path'] == "/path/to/auth" assert cis.metadata['secret_backend'] == "backend" assert cis.metadata['secret_key'] == "a_key" - assert cis.source_credential.name == src_cred.name + assert cis.source_credential.name == source_cred_hashi_secret.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk @@ -184,8 +180,7 @@ def source_cred_hashi_ssh(organization): @pytest.mark.django_db -def test_hashi_ssh_credential_source(run_module, admin_user, organization, silence_deprecation): - src_cred = source_cred_hashi_ssh(organization) +def test_hashi_ssh_credential_source(run_module, admin_user, organization, source_cred_hashi_ssh, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -196,7 +191,7 @@ def test_hashi_ssh_credential_source(run_module, admin_user, organization, silen ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_hashi_ssh.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "role": "role", "public_key": "a_key", "valid_principals": "some_value"}, @@ -214,7 +209,7 @@ def test_hashi_ssh_credential_source(run_module, admin_user, organization, silen assert cis.metadata['role'] == "role" assert cis.metadata['public_key'] == "a_key" assert cis.metadata['valid_principals'] == "some_value" - assert cis.source_credential.name == src_cred.name + assert cis.source_credential.name == source_cred_hashi_ssh.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk @@ -240,8 +235,7 @@ def source_cred_azure_kv(organization): @pytest.mark.django_db -def test_azure_kv_credential_source(run_module, admin_user, organization, silence_deprecation): - src_cred = source_cred_azure_kv(organization) +def test_azure_kv_credential_source(run_module, admin_user, organization, source_cred_azure_kv, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -252,7 +246,7 @@ def test_azure_kv_credential_source(run_module, admin_user, organization, silenc ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_azure_kv.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"secret_field": "my_pass"}, @@ -266,7 +260,7 @@ def test_azure_kv_credential_source(run_module, admin_user, organization, silenc cis = CredentialInputSource.objects.first() assert cis.metadata['secret_field'] == "my_pass" - assert cis.source_credential.name == src_cred.name + assert cis.source_credential.name == source_cred_azure_kv.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' assert result['id'] == cis.pk @@ -274,10 +268,10 @@ def test_azure_kv_credential_source(run_module, admin_user, organization, silenc # Test Changing Credential Source @pytest.fixture -def source_cred_aim_alt(ct): +def source_cred_aim_alt(aim_cred_type): return 
Credential.objects.create( name='Alternate CyberArk AIM Cred', - credential_type=ct, + credential_type=aim_cred_type, inputs={ "url": "https://cyberark-alt.example.com", "app_id": "myAltID", @@ -286,9 +280,7 @@ def source_cred_aim_alt(ct): ) @pytest.mark.django_db -def test_aim_credential_source(run_module, admin_user, organization, silence_deprecation): - cred_type=get_aim_cred_type() - src_cred = source_cred_aim(cred_type) +def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, source_cred_aim_alt, silence_deprecation): ct=CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( @@ -299,7 +291,7 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep ) result = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_aim.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, @@ -310,7 +302,7 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep assert result.get('changed'), result unchangedResult = run_module('tower_credential_input_source', dict( - source_credential=src_cred.name, + source_credential=source_cred_aim.name, target_credential=tgt_cred.name, input_field_name='password', metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"}, @@ -320,10 +312,8 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep assert not unchangedResult.get('failed', False), result.get('msg', result) assert not unchangedResult.get('changed'), result - src_cred_alt = source_cred_aim_alt(cred_type) - changedResult = run_module('tower_credential_input_source', dict( - source_credential=src_cred_alt.name, + source_credential=source_cred_aim_alt.name, target_credential=tgt_cred.name, input_field_name='password', state='present' @@ -336,6 +326,6 @@ def test_aim_credential_source(run_module, admin_user, organization, silence_dep cis = CredentialInputSource.objects.first() assert cis.metadata['object_query'] == "Safe=SUPERSAFE;Object=MyAccount" - assert cis.source_credential.name == src_cred_alt.name + assert cis.source_credential.name == source_cred_aim_alt.name assert cis.target_credential.name == tgt_cred.name assert cis.input_field_name == 'password' From fd18194b1b580532d49da34f9bc587896cb9092b Mon Sep 17 00:00:00 2001 From: Tom Page Date: Tue, 16 Jun 2020 15:57:19 +0100 Subject: [PATCH 138/494] Fix pylint errors --- .../modules/tower_credential_input_source.py | 1 + .../test/awx/test_credential_input_source.py | 82 ++++++++++--------- 2 files changed, 43 insertions(+), 40 deletions(-) diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py index 8e81d50569..dcce387054 100644 --- a/awx_collection/plugins/modules/tower_credential_input_source.py +++ b/awx_collection/plugins/modules/tower_credential_input_source.py @@ -72,6 +72,7 @@ EXAMPLES = ''' from ..module_utils.tower_api import TowerModule + def main(): # Any additional arguments that are not fields of the item can be added here argument_spec = dict( diff --git a/awx_collection/test/awx/test_credential_input_source.py b/awx_collection/test/awx/test_credential_input_source.py index 15773791da..a676ab15cb 100644 --- a/awx_collection/test/awx/test_credential_input_source.py +++ b/awx_collection/test/awx/test_credential_input_source.py @@ -5,9 +5,10 @@ import pytest from awx.main.models import 
CredentialInputSource, Credential, CredentialType, Organization + @pytest.fixture def aim_cred_type(): - ct=CredentialType.defaults['aim']() + ct = CredentialType.defaults['aim']() ct.save() return ct @@ -19,16 +20,16 @@ def source_cred_aim(aim_cred_type): name='CyberArk AIM Cred', credential_type=aim_cred_type, inputs={ - "url": "https://cyberark.example.com", - "app_id": "myAppID", - "verify": "false" - } + "url": "https://cyberark.example.com", + "app_id": "myAppID", + "verify": "false" + } ) @pytest.mark.django_db def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', @@ -62,23 +63,23 @@ def test_aim_credential_source(run_module, admin_user, organization, source_cred @pytest.fixture def source_cred_conjur(organization): # Make a credential type which will be used by the credential - ct=CredentialType.defaults['conjur']() + ct = CredentialType.defaults['conjur']() ct.save() return Credential.objects.create( name='CyberArk CONJUR Cred', credential_type=ct, inputs={ - "url": "https://cyberark.example.com", - "api_key": "myApiKey", - "account": "account", - "username": "username" - } + "url": "https://cyberark.example.com", + "api_key": "myApiKey", + "account": "account", + "username": "username" + } ) @pytest.mark.django_db def test_conjur_credential_source(run_module, admin_user, organization, source_cred_conjur, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', @@ -112,23 +113,23 @@ def test_conjur_credential_source(run_module, admin_user, organization, source_c @pytest.fixture def source_cred_hashi_secret(organization): # Make a credential type which will be used by the credential - ct=CredentialType.defaults['hashivault_kv']() + ct = CredentialType.defaults['hashivault_kv']() ct.save() return Credential.objects.create( name='HashiCorp secret Cred', credential_type=ct, inputs={ - "url": "https://secret.hash.example.com", - "token": "myApiKey", - "role_id": "role", - "secret_id": "secret" - } + "url": "https://secret.hash.example.com", + "token": "myApiKey", + "role_id": "role", + "secret_id": "secret" + } ) @pytest.mark.django_db def test_hashi_secret_credential_source(run_module, admin_user, organization, source_cred_hashi_secret, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', @@ -165,23 +166,23 @@ def test_hashi_secret_credential_source(run_module, admin_user, organization, so @pytest.fixture def source_cred_hashi_ssh(organization): # Make a credential type which will be used by the credential - ct=CredentialType.defaults['hashivault_ssh']() + ct = CredentialType.defaults['hashivault_ssh']() ct.save() return Credential.objects.create( name='HashiCorp ssh Cred', credential_type=ct, inputs={ - "url": "https://ssh.hash.example.com", - "token": "myApiKey", - "role_id": "role", - "secret_id": "secret" - } + "url": "https://ssh.hash.example.com", + "token": "myApiKey", + "role_id": "role", + "secret_id": "secret" + } ) @pytest.mark.django_db def test_hashi_ssh_credential_source(run_module, admin_user, organization, source_cred_hashi_ssh, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = 
CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', @@ -219,24 +220,24 @@ def test_hashi_ssh_credential_source(run_module, admin_user, organization, sourc @pytest.fixture def source_cred_azure_kv(organization): # Make a credential type which will be used by the credential - ct=CredentialType.defaults['azure_kv']() + ct = CredentialType.defaults['azure_kv']() ct.save() return Credential.objects.create( name='Azure KV Cred', credential_type=ct, inputs={ - "url": "https://key.azure.example.com", - "client": "client", - "secret": "secret", - "tenant": "tenant", - "cloud_name": "the_cloud", - } + "url": "https://key.azure.example.com", + "client": "client", + "secret": "secret", + "tenant": "tenant", + "cloud_name": "the_cloud", + } ) @pytest.mark.django_db def test_azure_kv_credential_source(run_module, admin_user, organization, source_cred_azure_kv, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', @@ -273,15 +274,16 @@ def source_cred_aim_alt(aim_cred_type): name='Alternate CyberArk AIM Cred', credential_type=aim_cred_type, inputs={ - "url": "https://cyberark-alt.example.com", - "app_id": "myAltID", - "verify": "false" - } + "url": "https://cyberark-alt.example.com", + "app_id": "myAltID", + "verify": "false" + } ) + @pytest.mark.django_db def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, source_cred_aim_alt, silence_deprecation): - ct=CredentialType.defaults['ssh']() + ct = CredentialType.defaults['ssh']() ct.save() tgt_cred = Credential.objects.create( name='Test Machine Credential', From 7bf1d4946e71cbbc6934e68dbb238ae740c72d23 Mon Sep 17 00:00:00 2001 From: Tom Page Date: Tue, 16 Jun 2020 17:14:52 +0100 Subject: [PATCH 139/494] Fixed final lint issues Signed-off-by: Tom Page --- .../plugins/modules/tower_credential_input_source.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py index dcce387054..bc2cb85579 100644 --- a/awx_collection/plugins/modules/tower_credential_input_source.py +++ b/awx_collection/plugins/modules/tower_credential_input_source.py @@ -36,7 +36,7 @@ options: description: - A JSON or YAML string required: False - type: str + type: dict target_credential: description: - The credential which will have its input defined by this source @@ -80,7 +80,7 @@ def main(): input_field_name=dict(required=True), target_credential=dict(required=True), source_credential=dict(default=''), - metadata=dict(type=dict), + metadata=dict(type="dict"), state=dict(choices=['present', 'absent'], default='present'), ) From 70afbe0b8d64159d8c926bd50e91456d5853e24e Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 16 Jun 2020 12:36:51 -0400 Subject: [PATCH 140/494] fix a regression in how job host summaries are generated this change fixes a bug introduced in the optimization at https://github.com/ansible/awx/pull/7352 1. Create inventory with multiple hosts 2. Run a playbook with a limit to match only one host 3. Run job, verify that it only acts on the one host 4. Go to inventory host list and see that all the hosts have last_job updated to point to the job that only acted on one host. 
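The reproduction steps above boil down to a membership check that was missing: a host should only have last_job repointed if its name actually appears in the playbook_on_stats event. A minimal, ORM-free sketch of that bookkeeping (FakeHost and the helper are illustrative stand-ins; the real change is in awx/main/models/events.py below):

    class FakeHost:
        def __init__(self, id, name):
            self.id, self.name = id, name
            self.last_job_id = None
            self.last_job_host_summary_id = None

    def update_last_job_fields(all_hosts, stats_hostnames, host_mapping, job_id):
        for h in all_hosts:
            # only hosts that show up in playbook_on_stats (i.e. were
            # inside the --limit) get last_job pointed at this job
            if h.name in stats_hostnames:
                h.last_job_id = job_id
            # summary rows only exist for hosts that were actually played
            if h.id in host_mapping:
                h.last_job_host_summary_id = host_mapping[h.id]

    hosts = [FakeHost(i, 'Host %d' % i) for i in range(3)]
    update_last_job_fields(hosts, {'Host 1'}, {1: 99}, job_id=7)
    assert hosts[1].last_job_id == 7 and hosts[0].last_job_id is None

The accompanying test below builds a ten-host inventory, emits a stats event naming a single host, and asserts that only that host picks up last_job and a JobHostSummary.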
--- awx/main/models/events.py | 6 +- .../tests/functional/models/test_events.py | 57 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index ac33a311f4..1f79b0e24b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -338,7 +338,7 @@ class BasePlaybookEvent(CreatedModifiedModel): if isinstance(self, JobEvent): hostnames = self._hostnames() - self._update_host_summary_from_stats(hostnames) + self._update_host_summary_from_stats(set(hostnames)) if self.job.inventory: try: self.job.inventory.update_computed_fields() @@ -521,7 +521,9 @@ class JobEvent(BasePlaybookEvent): for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host_id') ) for h in all_hosts: - h.last_job_id = job.id + # if the hostname *shows up* in the playbook_on_stats event + if h.name in hostnames: + h.last_job_id = job.id if h.id in host_mapping: h.last_job_host_summary_id = host_mapping[h.id] Host.objects.bulk_update(all_hosts, ['last_job_id', 'last_job_host_summary_id']) diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py index 7f881a2fea..943bd34654 100644 --- a/awx/main/tests/functional/models/test_events.py +++ b/awx/main/tests/functional/models/test_events.py @@ -3,7 +3,7 @@ import pytest from django.utils.timezone import now -from awx.main.models import Job, JobEvent, Inventory, Host +from awx.main.models import Job, JobEvent, Inventory, Host, JobHostSummary @pytest.mark.django_db @@ -153,3 +153,58 @@ def test_host_summary_generation_with_deleted_hosts(): assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10] assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9'] + + +@pytest.mark.django_db +def test_host_summary_generation_with_limit(): + # Make an inventory with 10 hosts, run a playbook with a --limit + # pointed at *one* host, + # Verify that *only* that host has an associated JobHostSummary and that + # *only* that host has an updated value for .last_job. 
+ hostnames = [f'Host {i}' for i in range(10)] + inv = Inventory() + inv.save() + Host.objects.bulk_create([ + Host(created=now(), modified=now(), name=h, inventory_id=inv.id) + for h in hostnames + ]) + j = Job(inventory=inv) + j.save() + + # host map is a data structure that tracks a mapping of host name --> ID + # for the inventory, _regardless_ of whether or not there's a limit + # applied to the actual playbook run + host_map = dict((host.name, host.id) for host in inv.hosts.all()) + + # by making the playbook_on_stats *only* include Host 1, we're emulating + # the behavior of a `--limit=Host 1` + matching_host = Host.objects.get(name='Host 1') + JobEvent.create_from_data( + job_id=j.pk, + parent_uuid='abc123', + event='playbook_on_stats', + event_data={ + 'ok': {matching_host.name: len(matching_host.name)}, # effectively, limit=Host 1 + 'changed': {}, + 'dark': {}, + 'failures': {}, + 'ignored': {}, + 'processed': {}, + 'rescued': {}, + 'skipped': {}, + }, + host_map=host_map + ).save() + + # since the playbook_on_stats only references one host, + # there should *only* be on JobHostSummary record (and it should + # be related to the appropriate Host) + assert JobHostSummary.objects.count() == 1 + for h in Host.objects.all(): + if h.name == 'Host 1': + assert h.last_job_id == j.id + assert h.last_job_host_summary_id == JobHostSummary.objects.first().id + else: + # all other hosts in the inventory should remain untouched + assert h.last_job_id is None + assert h.last_job_host_summary_id is None From eaadbe9730ec37d11c15c76ec7be6645b8a2da53 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 16 Jun 2020 12:36:51 -0400 Subject: [PATCH 141/494] fix a regression in how job host summaries are generated this change fixes a bug introduced in the optimization at https://github.com/ansible/awx/pull/7352 1. Create inventory with multiple hosts 2. Run a playbook with a limit to match only one host 3. Run job, verify that it only acts on the one host 4. Go to inventory host list and see that all the hosts have last_job updated to point to the job that only acted on one host. 
--- awx/main/models/events.py | 6 +- .../tests/functional/models/test_events.py | 57 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/awx/main/models/events.py b/awx/main/models/events.py index ac33a311f4..1f79b0e24b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -338,7 +338,7 @@ class BasePlaybookEvent(CreatedModifiedModel): if isinstance(self, JobEvent): hostnames = self._hostnames() - self._update_host_summary_from_stats(hostnames) + self._update_host_summary_from_stats(set(hostnames)) if self.job.inventory: try: self.job.inventory.update_computed_fields() @@ -521,7 +521,9 @@ class JobEvent(BasePlaybookEvent): for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host_id') ) for h in all_hosts: - h.last_job_id = job.id + # if the hostname *shows up* in the playbook_on_stats event + if h.name in hostnames: + h.last_job_id = job.id if h.id in host_mapping: h.last_job_host_summary_id = host_mapping[h.id] Host.objects.bulk_update(all_hosts, ['last_job_id', 'last_job_host_summary_id']) diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py index 7f881a2fea..943bd34654 100644 --- a/awx/main/tests/functional/models/test_events.py +++ b/awx/main/tests/functional/models/test_events.py @@ -3,7 +3,7 @@ import pytest from django.utils.timezone import now -from awx.main.models import Job, JobEvent, Inventory, Host +from awx.main.models import Job, JobEvent, Inventory, Host, JobHostSummary @pytest.mark.django_db @@ -153,3 +153,58 @@ def test_host_summary_generation_with_deleted_hosts(): assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10] assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9'] + + +@pytest.mark.django_db +def test_host_summary_generation_with_limit(): + # Make an inventory with 10 hosts, run a playbook with a --limit + # pointed at *one* host, + # Verify that *only* that host has an associated JobHostSummary and that + # *only* that host has an updated value for .last_job. 
+ hostnames = [f'Host {i}' for i in range(10)] + inv = Inventory() + inv.save() + Host.objects.bulk_create([ + Host(created=now(), modified=now(), name=h, inventory_id=inv.id) + for h in hostnames + ]) + j = Job(inventory=inv) + j.save() + + # host map is a data structure that tracks a mapping of host name --> ID + # for the inventory, _regardless_ of whether or not there's a limit + # applied to the actual playbook run + host_map = dict((host.name, host.id) for host in inv.hosts.all()) + + # by making the playbook_on_stats *only* include Host 1, we're emulating + # the behavior of a `--limit=Host 1` + matching_host = Host.objects.get(name='Host 1') + JobEvent.create_from_data( + job_id=j.pk, + parent_uuid='abc123', + event='playbook_on_stats', + event_data={ + 'ok': {matching_host.name: len(matching_host.name)}, # effectively, limit=Host 1 + 'changed': {}, + 'dark': {}, + 'failures': {}, + 'ignored': {}, + 'processed': {}, + 'rescued': {}, + 'skipped': {}, + }, + host_map=host_map + ).save() + + # since the playbook_on_stats only references one host, + # there should *only* be on JobHostSummary record (and it should + # be related to the appropriate Host) + assert JobHostSummary.objects.count() == 1 + for h in Host.objects.all(): + if h.name == 'Host 1': + assert h.last_job_id == j.id + assert h.last_job_host_summary_id == JobHostSummary.objects.first().id + else: + # all other hosts in the inventory should remain untouched + assert h.last_job_id is None + assert h.last_job_host_summary_id is None From 8d63ebf0e00ff01443108807278f57949427c0c0 Mon Sep 17 00:00:00 2001 From: mabashian Date: Mon, 15 Jun 2020 17:20:30 -0400 Subject: [PATCH 142/494] Upgrade to the latest angular-scheduler --- awx/ui/package-lock.json | 58 +++++++++++++++++++++++++--------------- awx/ui/package.json | 2 +- 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index 3e072d5294..00fe8894d5 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -246,14 +246,14 @@ "integrity": "sha512-nB/xe7JQWF9nLvhHommAICQ3eWrfRETo0EVGFESi952CDzDa+GAJ/2BFBNw44QqQPxj1Xua/uYKrbLsOGWZdbQ==" }, "angular-scheduler": { - "version": "git+https://git@github.com/ansible/angular-scheduler.git#a519c52312cb4430a59a8d58e01d3eac3fe5018a", - "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.1", + "version": "git+https://git@github.com/ansible/angular-scheduler.git#f9595a06db0e08d426b144a6fbe68f3966b77ae4", + "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.2", "requires": { - "angular": "~1.7.2", - "angular-tz-extensions": "github:ansible/angular-tz-extensions#fc60660f43ee9ff84da94ca71ab27ef0c20fd77d", + "angular": "^1.7.9", + "angular-tz-extensions": "github:ansible/angular-tz-extensions", "jquery": "*", "jquery-ui": "*", - "lodash": "~3.8.0", + "lodash": "^4.17.15", "moment": "^2.10.2", "rrule": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c" }, @@ -266,14 +266,9 @@ "angular-filters": "^1.1.2", "jquery": "^3.1.0", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" + "timezone-js": "github:ansible/timezone-js#0.4.14" } }, - "lodash": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.8.0.tgz", - "integrity": "sha1-N265i9zZOCqTZcM8TLglDeEyW5E=" - }, "rrule": { "version": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c", "from": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c" @@ -1817,6 
+1812,7 @@ "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", "integrity": "sha1-OciRjO/1eZ+D+UkqhI9iWt0Mdm8=", "dev": true, + "optional": true, "requires": { "hoek": "2.x.x" } @@ -5381,7 +5377,8 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true + "dev": true, + "optional": true }, "aproba": { "version": "1.2.0", @@ -5405,13 +5402,15 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true + "dev": true, + "optional": true }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5421,19 +5420,22 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true + "dev": true, + "optional": true }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true + "dev": true, + "optional": true }, "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true + "dev": true, + "optional": true }, "core-util-is": { "version": "1.0.2", @@ -5554,7 +5556,8 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true + "dev": true, + "optional": true }, "ini": { "version": "1.3.5", @@ -5568,6 +5571,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", "dev": true, + "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -5584,6 +5588,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dev": true, + "optional": true, "requires": { "brace-expansion": "^1.1.7" } @@ -5689,7 +5694,8 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true + "dev": true, + "optional": true }, "object-assign": { "version": "4.1.1", @@ -5703,6 +5709,7 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, + "optional": true, "requires": { "wrappy": "1" } @@ -5798,7 +5805,8 @@ "version": "5.1.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==", - "dev": true + "dev": true, + "optional": true }, "safer-buffer": { "version": "2.1.2", @@ -5840,6 +5848,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "dev": true, + "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": 
"^1.0.0", @@ -5861,6 +5870,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, + "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -5893,7 +5903,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true + "dev": true, + "optional": true } } }, @@ -6640,7 +6651,8 @@ "version": "2.16.3", "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", "integrity": "sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0=", - "dev": true + "dev": true, + "optional": true }, "home-or-tmp": { "version": "2.0.0", @@ -9138,6 +9150,7 @@ "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", "dev": true, + "optional": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -9147,7 +9160,8 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", - "dev": true + "dev": true, + "optional": true } } }, diff --git a/awx/ui/package.json b/awx/ui/package.json index 60e79f0cd1..1fd3d7152d 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -107,7 +107,7 @@ "angular-moment": "^1.3.0", "angular-mousewheel": "^1.0.5", "angular-sanitize": "^1.7.9", - "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler#v0.4.1", + "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.2", "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions#v0.5.2", "angular-xeditable": "~0.8.0", "ansi-to-html": "^0.6.3", From d5fcf733f074027c1af602b72ce5754aa720966a Mon Sep 17 00:00:00 2001 From: mabashian Date: Mon, 15 Jun 2020 17:56:20 -0400 Subject: [PATCH 143/494] Upgrades to the latest angular-codemirror --- awx/ui/package-lock.json | 19 +++++++------------ awx/ui/package.json | 2 +- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index 00fe8894d5..f55c54c56a 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -158,23 +158,18 @@ "from": "git+https://git@github.com/ansible/angular-breadcrumb.git#0.4.1" }, "angular-codemirror": { - "version": "git+https://git@github.com/ansible/angular-codemirror.git#447f071eff8f6fde7b5ec769c57c7dc98a014fdf", - "from": "git+https://git@github.com/ansible/angular-codemirror.git#v1.1.2", + "version": "git+https://git@github.com/ansible/angular-codemirror.git#4ee3223f505cb0df2406a1e61073beaeebd3b56a", + "from": "git+https://git@github.com/ansible/angular-codemirror.git#v1.2.1", "requires": { - "angular": "~1.6.6", + "angular": "^1.7.9", "codemirror": "^5.17.0", - "jquery": "^3.2.1" + "jquery": "^3.5.1" }, "dependencies": { - "angular": { - "version": "1.6.10", - "resolved": "https://registry.npmjs.org/angular/-/angular-1.6.10.tgz", - "integrity": "sha512-PCZ5/hVdvPQiYyH0VwsPjrErPHRcITnaXxhksceOXgtJeesKHLA7KDu4X/yvcAi+1zdGgGF+9pDxkJvghXI9Wg==" - }, "jquery": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.3.1.tgz", - "integrity": "sha512-Ubldcmxp5np52/ENotGxlLe6aGMvmF4R8S6tZjsP6Knsaxd/xp3Zrh50cG93lR6nPXyUFwzN3ZSOQI0wRJNdGg==" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", + "integrity": 
"sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" } } }, diff --git a/awx/ui/package.json b/awx/ui/package.json index 1fd3d7152d..28af9e0f4a 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -99,7 +99,7 @@ "@uirouter/angularjs": "1.0.18", "angular": "^1.7.9", "angular-breadcrumb": "git+https://git@github.com/ansible/angular-breadcrumb#0.4.1", - "angular-codemirror": "git+https://git@github.com/ansible/angular-codemirror#v1.1.2", + "angular-codemirror": "git+https://git@github.com/ansible/angular-codemirror.git#v1.2.1", "angular-cookies": "^1.7.9", "angular-drag-and-drop-lists": "git+https://git@github.com/ansible/angular-drag-and-drop-lists#v1.4.1", "angular-duration-format": "^1.0.1", From 34dd034f7ca02b299578c68a839e4ce493c4858e Mon Sep 17 00:00:00 2001 From: mabashian Date: Mon, 15 Jun 2020 18:06:44 -0400 Subject: [PATCH 144/494] Upgrades to the latest angular-tz-extensions --- awx/ui/package-lock.json | 25 ++++++++----------------- awx/ui/package.json | 2 +- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index f55c54c56a..10774dc279 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -271,29 +271,20 @@ } }, "angular-tz-extensions": { - "version": "git+https://git@github.com/ansible/angular-tz-extensions.git#9cabb05d58079092bfb29ccae721b35b46f28af6", - "from": "git+https://git@github.com/ansible/angular-tz-extensions.git#v0.5.2", + "version": "git+https://git@github.com/ansible/angular-tz-extensions.git#5c594b5756d29637601020bba16274f10ee0ed65", + "from": "git+https://git@github.com/ansible/angular-tz-extensions.git#v0.6.1", "requires": { - "angular": "~1.6.6", + "angular": "^1.7.9", "angular-filters": "^1.1.2", - "jquery": "^3.1.0", + "jquery": "^3.5.1", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" + "timezone-js": "github:ansible/timezone-js#0.4.14" }, "dependencies": { - "angular": { - "version": "1.6.10", - "resolved": "https://registry.npmjs.org/angular/-/angular-1.6.10.tgz", - "integrity": "sha512-PCZ5/hVdvPQiYyH0VwsPjrErPHRcITnaXxhksceOXgtJeesKHLA7KDu4X/yvcAi+1zdGgGF+9pDxkJvghXI9Wg==" - }, "jquery": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.3.1.tgz", - "integrity": "sha512-Ubldcmxp5np52/ENotGxlLe6aGMvmF4R8S6tZjsP6Knsaxd/xp3Zrh50cG93lR6nPXyUFwzN3ZSOQI0wRJNdGg==" - }, - "timezone-js": { - "version": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f", - "from": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", + "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" } } }, diff --git a/awx/ui/package.json b/awx/ui/package.json index 28af9e0f4a..ffe76211f7 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -108,7 +108,7 @@ "angular-mousewheel": "^1.0.5", "angular-sanitize": "^1.7.9", "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.2", - "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions#v0.5.2", + "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions.git#v0.6.1", "angular-xeditable": "~0.8.0", "ansi-to-html": "^0.6.3", "babel-polyfill": "^6.26.0", From 5e3ce7b7eacd1a4699377eea0dafd89f83ab3bed Mon Sep 17 00:00:00 2001 From: mabashian Date: 
Tue, 16 Jun 2020 10:27:28 -0400 Subject: [PATCH 145/494] Update ngToast to most recent version --- awx/ui/package-lock.json | 20 ++++---------------- awx/ui/package.json | 2 +- 2 files changed, 5 insertions(+), 17 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index 10774dc279..70c42991ab 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -9458,23 +9458,11 @@ "dev": true }, "ng-toast": { - "version": "git+https://git@github.com/ansible/ngToast.git#2c2038381d5cfcab26fdefe98e0408a52e71daa5", - "from": "git+https://git@github.com/ansible/ngToast.git#v2.1.1", + "version": "git+https://git@github.com/ansible/ngToast.git#4550efb9cf7fb90c9bfbd9476f1fe650aaad5b6e", + "from": "git+https://git@github.com/ansible/ngToast.git#v2.2.1", "requires": { - "angular": "~1.6.6", - "angular-sanitize": "~1.6.6" - }, - "dependencies": { - "angular": { - "version": "1.6.10", - "resolved": "https://registry.npmjs.org/angular/-/angular-1.6.10.tgz", - "integrity": "sha512-PCZ5/hVdvPQiYyH0VwsPjrErPHRcITnaXxhksceOXgtJeesKHLA7KDu4X/yvcAi+1zdGgGF+9pDxkJvghXI9Wg==" - }, - "angular-sanitize": { - "version": "1.6.10", - "resolved": "https://registry.npmjs.org/angular-sanitize/-/angular-sanitize-1.6.10.tgz", - "integrity": "sha512-01i1Xoq9ykUrsoYQMSB6dWZmPp9Df5hfCqMAGGzJBWZ7L2WY0OtUphdI0YvR8ZF9lAsWtGNtsEFilObjq5nTgQ==" - } + "angular": "^1.7.9", + "angular-sanitize": "^1.7.9" } }, "ngtemplate-loader": { diff --git a/awx/ui/package.json b/awx/ui/package.json index ffe76211f7..b4a94fd4e4 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -130,7 +130,7 @@ "lr-infinite-scroll": "git+https://git@github.com/lorenzofox3/lrInfiniteScroll", "mathjs": "^3.15.0", "moment": "^2.19.4", - "ng-toast": "git+https://git@github.com/ansible/ngToast#v2.1.1", + "ng-toast": "git+https://git@github.com/ansible/ngToast.git#v2.2.1", "nvd3": "^1.8.6", "popper.js": "~1.14.4", "reconnectingwebsocket": "^1.0.0", From 781b6758375731bb188828b0c96b33404fb0ca31 Mon Sep 17 00:00:00 2001 From: mabashian Date: Tue, 16 Jun 2020 12:39:48 -0400 Subject: [PATCH 146/494] Upgrade jquery to 3.5.1 --- awx/ui/client/src/shared/form-generator.js | 14 ++-- .../list-generator/list-generator.factory.js | 2 +- .../job_templates/job-template.form.js | 2 +- awx/ui/client/src/templates/workflows.form.js | 2 +- awx/ui/package-lock.json | 77 +++++++++---------- awx/ui/package.json | 4 +- 6 files changed, 47 insertions(+), 54 deletions(-) diff --git a/awx/ui/client/src/shared/form-generator.js b/awx/ui/client/src/shared/form-generator.js index b5f515537b..fca3a419f2 100644 --- a/awx/ui/client/src/shared/form-generator.js +++ b/awx/ui/client/src/shared/form-generator.js @@ -589,7 +589,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat html += (field.readonly) ? "disabled " : ""; html += (field.ngChange) ? "ng-change=\"" +field.ngChange + "\" " : ""; html += (field.ngDisabled) ? "ng-disabled=\"" + field.ngDisabled + "\" " : ""; - html += `>${field.label}`; + html += `/>${field.label}`; html += (field.awPopOver) ? Attr(field, 'awPopOver', fld) : ""; html += ``; @@ -781,7 +781,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat html += field.awRequiredWhen.alwaysShowAsterisk ? "data-awrequired-always-show-asterisk=true " : ""; } html += (field.awValidUrl) ? 
"aw-valid-url " : ""; - html += ">\n"; + html += "/>\n"; } if (field.clear) { @@ -806,7 +806,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat data-placement="top" id="${this.form.name}_${fld}_gen_btn" > - + `; const genHashButtonTemplate = _.get(field, 'genHashButtonTemplate', defaultGenHashButtonTemplate); @@ -900,7 +900,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat html += field.awRequiredWhen.alwaysShowAsterisk ? "data-awrequired-always-show-asterisk=true " : ""; } html += (field.awValidUrl) ? "aw-valid-url " : ""; - html += ">\n"; + html += "/>\n"; } html += "\n"; @@ -1117,7 +1117,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat html += field.awRequiredWhen.reqExpression ? "aw-required-when=\"" + field.awRequiredWhen.reqExpression + "\" " : ""; html += field.awRequiredWhen.alwaysShowAsterisk ? "data-awrequired-always-show-asterisk=true " : ""; } - html += " >\n"; + html += " />\n"; // Add error messages if (field.required) { @@ -1219,7 +1219,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat html += (field.ngChange) ? Attr(field, 'ngChange') : ""; html += (field.readonly) ? "disabled " : ""; html += (field.ngDisabled) ? Attr(field, 'ngDisabled') : ""; - html += " > "; + html += " /> "; html += field.label; html += "\n"; html += "
'; } else { if (options.input_type === "radio") { //added by JT so that lookup forms can be either radio inputs or check box inputs - innerTable += ``; + innerTable += ``; } else { // its assumed that options.input_type = checkbox innerTable += " - + `, diff --git a/awx/ui/client/src/templates/workflows.form.js b/awx/ui/client/src/templates/workflows.form.js index d0acdf134a..f65f08f353 100644 --- a/awx/ui/client/src/templates/workflows.form.js +++ b/awx/ui/client/src/templates/workflows.form.js @@ -218,7 +218,7 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n) { data-placement="top" id="workflow_job_template_webhook_key_gen_btn" > - + `, diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index 70c42991ab..27bdc044cc 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -241,12 +241,12 @@ "integrity": "sha512-nB/xe7JQWF9nLvhHommAICQ3eWrfRETo0EVGFESi952CDzDa+GAJ/2BFBNw44QqQPxj1Xua/uYKrbLsOGWZdbQ==" }, "angular-scheduler": { - "version": "git+https://git@github.com/ansible/angular-scheduler.git#f9595a06db0e08d426b144a6fbe68f3966b77ae4", - "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.2", + "version": "git+https://git@github.com/ansible/angular-scheduler.git#6a2d33b06b1143e7449c4427f222fd05559f3a23", + "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.3", "requires": { "angular": "^1.7.9", - "angular-tz-extensions": "github:ansible/angular-tz-extensions", - "jquery": "*", + "angular-tz-extensions": "github:ansible/angular-tz-extensions#5c594b5756d29637601020bba16274f10ee0ed65", + "jquery": "^3.5.1", "jquery-ui": "*", "lodash": "^4.17.15", "moment": "^2.10.2", @@ -254,16 +254,21 @@ }, "dependencies": { "angular-tz-extensions": { - "version": "github:ansible/angular-tz-extensions#fc60660f43ee9ff84da94ca71ab27ef0c20fd77d", + "version": "github:ansible/angular-tz-extensions#5c594b5756d29637601020bba16274f10ee0ed65", "from": "github:ansible/angular-tz-extensions", "requires": { - "angular": "~1.7.2", + "angular": "^1.7.9", "angular-filters": "^1.1.2", - "jquery": "^3.1.0", + "jquery": "^3.5.1", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#0.4.14" + "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" } }, + "jquery": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", + "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" + }, "rrule": { "version": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c", "from": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c" @@ -278,7 +283,7 @@ "angular-filters": "^1.1.2", "jquery": "^3.5.1", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#0.4.14" + "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" }, "dependencies": { "jquery": { @@ -1798,7 +1803,6 @@ "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", "integrity": "sha1-OciRjO/1eZ+D+UkqhI9iWt0Mdm8=", "dev": true, - "optional": true, "requires": { "hoek": "2.x.x" } @@ -1814,6 +1818,13 @@ "integrity": "sha512-213St/G8KT3mjs4qu4qwww74KWysMaIeqgq5OhrboZjIjemIpyuxlSo9FNNI5+KzpkkxkRRba+oewiRGV42B1A==", "requires": { "jquery": ">=1.7.1 <4.0.0" + }, + "dependencies": { + "jquery": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", + "integrity": 
"sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" + } } }, "brace-expansion": { @@ -5363,8 +5374,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, - "optional": true + "dev": true }, "aproba": { "version": "1.2.0", @@ -5388,15 +5398,13 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true, - "optional": true + "dev": true }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, - "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5406,22 +5414,19 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true, - "optional": true + "dev": true }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true, - "optional": true + "dev": true }, "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true, - "optional": true + "dev": true }, "core-util-is": { "version": "1.0.2", @@ -5542,8 +5547,7 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true, - "optional": true + "dev": true }, "ini": { "version": "1.3.5", @@ -5557,7 +5561,6 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", "dev": true, - "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -5574,7 +5577,6 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dev": true, - "optional": true, "requires": { "brace-expansion": "^1.1.7" } @@ -5680,8 +5682,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true, - "optional": true + "dev": true }, "object-assign": { "version": "4.1.1", @@ -5695,7 +5696,6 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, - "optional": true, "requires": { "wrappy": "1" } @@ -5791,8 +5791,7 @@ "version": "5.1.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==", - "dev": true, - "optional": true + "dev": true }, "safer-buffer": { "version": "2.1.2", @@ -5834,7 +5833,6 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "dev": true, - "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -5856,7 +5854,6 @@ "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, - "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -5889,8 +5886,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true, - "optional": true + "dev": true } } }, @@ -6637,8 +6633,7 @@ "version": "2.16.3", "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", "integrity": "sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0=", - "dev": true, - "optional": true + "dev": true }, "home-or-tmp": { "version": "2.0.0", @@ -7830,9 +7825,9 @@ "integrity": "sha1-+eIwPUUH9tdDVac2ZNFED7Wg71k=" }, "jquery": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.4.1.tgz", - "integrity": "sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw==" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", + "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" }, "jquery-mousewheel": { "version": "3.1.13", @@ -9136,7 +9131,6 @@ "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", "dev": true, - "optional": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -9146,8 +9140,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", - "dev": true, - "optional": true + "dev": true } } }, diff --git a/awx/ui/package.json b/awx/ui/package.json index b4a94fd4e4..1f6b603665 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -107,7 +107,7 @@ "angular-moment": "^1.3.0", "angular-mousewheel": "^1.0.5", "angular-sanitize": "^1.7.9", - "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.2", + "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.3", "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions.git#v0.6.1", "angular-xeditable": "~0.8.0", "ansi-to-html": "^0.6.3", @@ -122,7 +122,7 @@ "html-entities": "^1.2.1", "inherits": "^1.0.2", "javascript-detect-element-resize": "^0.5.3", - "jquery": "^3.4.1", + "jquery": "^3.5.1", "jquery-ui": "^1.12.1", "js-yaml": "^3.13.1", "legacy-loader": "0.0.2", From 93498b2d7f1d2597be764aab8d66c1cc25bdd7e3 Mon Sep 17 00:00:00 2001 From: ansible-translation-bot Date: Fri, 12 Jun 2020 17:03:37 +0000 Subject: [PATCH 147/494] UI translation strings for release_3.7.1 branch --- awx/locale/fr/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/locale/ja/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/locale/zh/LC_MESSAGES/django.po | 364 ++++++++++++++-------------- awx/ui/po/fr.po | 25 +- awx/ui/po/ja.po | 24 +- awx/ui/po/zh.po | 27 +-- 6 files changed, 582 insertions(+), 586 deletions(-) diff --git a/awx/locale/fr/LC_MESSAGES/django.po b/awx/locale/fr/LC_MESSAGES/django.po index 2e07a6232a..62c2ba7292 100644 --- a/awx/locale/fr/LC_MESSAGES/django.po +++ b/awx/locale/fr/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" 
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -528,7 +528,7 @@ msgstr "Le projet de modèle d'inventaire est manquant ou non défini." msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "Inconnu, il se peut que le job ait été exécuté avant que les configurations de lancement ne soient sauvegardées." -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." msgstr "{} ne sont pas autorisés à utiliser les commandes ad hoc." @@ -547,324 +547,324 @@ msgstr "La variable fournie {} n'a pas de valeur de base de données de remplaç msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ est un mot clé réservé et ne peut pas être utilisé comme {}.\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "Un projet est nécessaire pour exécuter une tâche." -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." msgstr "Une révision n'a pas été exécutée en raison de l'échec de la mise à jour du projet." -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "L'inventaire associé à ce modèle de tâche est en cours de suppression." -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "L'inventaire fourni est en cours de suppression." -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "Ne peut pas attribuer plusieurs identifiants {}." -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "Ne peut pas attribuer d'information d'identification de type `{}`" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." msgstr "Le retrait des identifiants {} au moment du lancement sans procurer de valeurs de remplacement n'est pas pris en charge. La liste fournie manquait d'identifiant(s): {}." -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "L'inventaire associé à ce flux de travail est en cours de suppression." 
-#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "Type de message '{}' invalide, doit être soit 'message' soit 'body'" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "Chaîne attendue pour '{}', trouvé {}, " -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "Les messages ne peuvent pas contenir de nouvelles lignes (trouvé nouvelle ligne dans l'événement {})" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "Dict attendu pour le champ 'messages', trouvé {}" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "L'événement '{}' est invalide, il doit être de type 'started', 'success', 'error' ou 'workflow_approval'" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "Dict attendu pour l'événement '{}', trouvé {}" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "L'événement d'approbation de workflow '{}' n'est pas valide, il doit être 'running', 'approved', 'timed_out' ou 'denied'" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "Dict attendu pour l'événement d'approbation du workflow '{}', trouvé {}" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "Impossible de rendre le message '{}' : {}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "Champ '{}' non disponible" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 msgid "Security error due to field '{}'" msgstr "Erreur de sécurité due au champ '{}'" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "Le corps du webhook pour '{}' doit être un dictionnaire json. Trouvé le type '{}'." -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." msgstr "Le corps du webhook pour '{}' n'est pas un dictionnaire json valide ({})." -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "Champs obligatoires manquants pour la configuration des notifications : notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "Aucune valeur spécifiée pour le champ '{}'" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." msgstr "La méthode HTTP doit être soit 'POST' soit 'PUT'." -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "Champs obligatoires manquants pour la configuration des notifications : {}." 
-#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "Type de champ de configuration '{}' incorrect, {} attendu." -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "Corps de notification" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DTSTART:YYYYMMDDTHHMMSSZ" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ." -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "Une seule valeur DTSTART est prise en charge." -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "RRULE obligatoire dans rrule." -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "Une seule valeur RRULE est prise en charge." -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "INTERVAL obligatoire dans rrule." -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "SECONDLY n'est pas pris en charge." -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "Une seule valeur BYMONTHDAY est prise en charge." -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "Une seule valeur BYMONTH est prise en charge." -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "BYDAY avec un préfixe numérique non pris en charge." -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "BYYEARDAY non pris en charge." -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "BYWEEKNO non pris en charge." -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE peut contenir à la fois COUNT et UNTIL" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." msgstr "COUNT > 999 non pris en charge." -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "L'analyse rrule n'a pas pu être validée : {}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "La source d'inventaire doit être une ressource cloud." -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "Le projet manuel ne peut pas avoir de calendrier défini." -#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." msgstr "Impossible de planifier les sources d'inventaire avec `update_on_project_update`. Planifiez plutôt son projet source`{}`." 
-#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "Le nombre de jobs en cours d'exécution ou en attente qui sont ciblés pour cette instance." -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "Le nombre de jobs qui ciblent cette instance." -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "Le nombre de jobs en cours d'exécution ou en attente qui sont ciblés pour ce groupe d'instances." -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "Le nombre de jobs qui ciblent ce groupe d'instances" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "Indique si le groupe d'instances contrôle un autre groupe" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "Indique si les instances de ce groupe sont isolées. Les groupes isolés ont un groupe de contrôleurs désigné." -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "Indique si les instances de ce groupe sont conteneurisées. Les groupes conteneurisés ont un groupe Openshift ou Kubernetes désigné." -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "Pourcentage d'instances de stratégie" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "Le pourcentage minimum de toutes les instances qui seront automatiquement assignées à ce groupe lorsque de nouvelles instances seront mises en ligne." -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "Instances de stratégies minimum" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "Nombre minimum statique d'instances qui seront automatiquement assignées à ce groupe lors de la mise en ligne de nouvelles instances." -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "Listes d'instances de stratégie" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "Liste des cas de concordance exacte qui seront assignés à ce groupe." -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "Entrée dupliquée {}." -#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." msgstr "{} n'est pas un nom d'hôte valide d'instance existante." 
-#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "Des instances isolées ne peuvent pas être ajoutées ou supprimées de groupes d'instances via l'API." -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "L'appartenance à un groupe d'instances isolées n'est sans doute pas gérée par l'API." -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "Les instances conteneurisées ne peuvent pas être gérées via l'API" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." msgstr "Le nom de groupe de l'instance Tower ne peut pas être modifié." -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "Seuls les identifiants Kubernetes peuvent être associés à un groupe d'instances" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "Le cas échéant, affiche le nom de champ du rôle ou de la relation qui a changé." -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "Le cas échéant, affiche le modèle sur lequel le rôle ou la relation a été défini." -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "Un récapitulatif des valeurs nouvelles et modifiées lorsqu'un objet est créé, mis à jour ou supprimé" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "Pour créer, mettre à jour et supprimer des événements, il s'agit du type d'objet qui a été affecté. Pour associer et dissocier des événements, il s'agit du type d'objet associé à ou dissocié de object2." -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "Laisser vide pour créer, mettre à jour et supprimer des événements. Pour associer et dissocier des événements, il s'agit du type d'objet auquel object1 est associé." -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "Action appliquée par rapport à l'objet ou aux objets donnés." @@ -1638,7 +1638,7 @@ msgstr "Exemple de paramètre" msgid "Example setting which can be different for each user." msgstr "Exemple de paramètre qui peut être différent pour chaque utilisateur." 
-#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "Utilisateur" @@ -1741,15 +1741,15 @@ msgstr "Système" msgid "OtherSystem" msgstr "Autre Système" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "Catégories de paramètre" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "Détails du paramètre" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "Journalisation du test de connectivité" @@ -2795,7 +2795,7 @@ msgstr "URL Conjur" msgid "API Key" msgstr "Clé API" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "Compte" @@ -2882,7 +2882,7 @@ msgid "" msgstr "Nom du backend secret (s'il est laissé vide, le premier segment du chemin secret sera utilisé)." #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "Nom de la clé" @@ -3259,7 +3259,7 @@ msgid "" "Management (IAM) users." msgstr "Le service de jeton de sécurité (STS) est un service Web qui permet de demander des informations d’identification provisoires avec des privilèges limités pour les utilisateurs d’AWS Identity and Access Management (IAM)." -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3300,7 +3300,7 @@ msgstr "Les domaines OpenStack définissent les limites administratives. Ils son msgid "Verify SSL" msgstr "Vérifier SSL" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3313,7 +3313,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." msgstr "Saisir le nom d’hôte ou l’adresse IP qui correspond à votre VMware vCenter." -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "Red Hat Satellite 6" @@ -3327,7 +3327,7 @@ msgid "" "example, https://satellite.example.org" msgstr "Veuillez saisir l’URL qui correspond à votre serveur Red Hat Satellite 6. Par exemple, https://satellite.example.org" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3341,7 +3341,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "Veuillez saisir l’URL de la machine virtuelle qui correspond à votre instance de CloudForm. Par exemple, https://cloudforms.example.org" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3370,7 +3370,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." msgstr "Collez le contenu du fichier PEM associé à l’adresse électronique du compte de service." 
-#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3408,7 +3408,7 @@ msgstr "Jeton d'accès personnel GitLab" msgid "This token needs to come from your profile settings in GitLab" msgstr "Ce jeton doit provenir de vos paramètres de profil dans GitLab" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "Red Hat Virtualization" @@ -3424,7 +3424,7 @@ msgstr "Fichier CA" msgid "Absolute file path to the CA file to use (optional)" msgstr "Chemin d'accès absolu vers le fichier CA à utiliser (en option)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3468,7 +3468,7 @@ msgstr "La source doit être une information d'identification externe" msgid "Input field must be defined on target credential (options are {})." msgstr "Le champ de saisie doit être défini sur des informations d'identification externes (les options sont {})." -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "Échec de l'hôte" @@ -3476,7 +3476,7 @@ msgstr "Échec de l'hôte" msgid "Host Started" msgstr "Hôte démarré" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "Hôte OK" @@ -3484,11 +3484,11 @@ msgstr "Hôte OK" msgid "Host Failure" msgstr "Échec de l'hôte" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "Hôte ignoré" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "Hôte inaccessible" @@ -3572,27 +3572,27 @@ msgstr "Scène démarrée" msgid "Playbook Complete" msgstr "Playbook terminé" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "Déboguer" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "Verbeux" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "Obsolète" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "Avertissement" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "Avertissement système" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "Erreur" @@ -3620,300 +3620,300 @@ msgid "" "this group" msgstr "Liste des cas de concordance exacte qui seront toujours assignés automatiquement à ce groupe." 
-#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "Les hôtes ont un lien direct vers cet inventaire." -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." msgstr "Hôtes pour inventaire générés avec la propriété host_filter." -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "inventaires" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "Organisation contenant cet inventaire." -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "Variables d'inventaire au format JSON ou YAML." -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Indicateur signalant si des hôtes de cet inventaire ont échoué." -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre total d'hôtes dans cet inventaire." -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre d'hôtes dans cet inventaire avec des échecs actifs." -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Nombre total de groupes dans cet inventaire." -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "Ce champ est obsolète et sera supprimé dans une prochaine version. Indicateur signalant si cet inventaire a des sources d’inventaire externes." -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "Nombre total de sources d'inventaire externes configurées dans cet inventaire." -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "Nombre total de sources d'inventaire externes en échec dans cet inventaire." -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "Genre d'inventaire représenté." -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "Filtre appliqué aux hôtes de cet inventaire." 
-#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "Informations d'identification à utiliser par les hôtes appartenant à cet inventaire lors de l'accès à l'API Red Hat Insights ." -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "Marqueur indiquant que cet inventaire est en cours de suppression." -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "N'a pas pu traiter les sous-ensembles en tant que spécification de découpage." -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "Le nombre de tranches doit être inférieur au nombre total de tranches." -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." msgstr "Le nombre de tranches doit être 1 ou valeur supérieure." -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "Attribution non autorisée pour un inventaire Smart" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "Le genre d'informations d'identification doit être 'insights'." -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "Cet hôte est-il en ligne et disponible pour exécuter des tâches ?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "Valeur utilisée par la source d'inventaire distante pour identifier l'hôte de façon unique" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "Variables d'hôte au format JSON ou YAML." -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "Sources d'inventaire qui ont créé ou modifié cet hôte." -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "Structure JSON arbitraire des faits ansible les plus récents, par hôte." -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." msgstr "Date et heure de la dernière modification apportée à ansible_facts." -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "Identifiant unique de l'hôte de Red Hat Insights." -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "Variables de groupe au format JSON ou YAML." -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "Hôtes associés directement à ce groupe." -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." 
msgstr "Sources d'inventaire qui ont créé ou modifié ce groupe." -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "Fichier, répertoire ou script" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "Provenance d'un projet" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "Script personnalisé" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "Variables de source d'inventaire au format JSON ou YAML." -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "Liste d'expressions de filtre séparées par des virgules (EC2 uniquement). Les hôtes sont importés lorsque l'UN des filtres correspondent." -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "Limiter automatiquement les groupes créés à partir de la source d'inventaire (EC2 uniquement)." -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." msgstr "Écraser les groupes locaux et les hôtes de la source d'inventaire distante." -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "Écraser les variables locales de la source d'inventaire distante." -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." msgstr "Délai écoulé (en secondes) avant que la tâche ne soit annulée." 
-#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "ID d'image" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "Zone de disponibilité" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "ID d'instance" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "État de l'instance" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "Plateforme " -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "Type d'instance" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "Région" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "Groupe de sécurité" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "Balises" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "Ne rien baliser" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "ID VPC" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "Les sources d'inventaire cloud (telles que %s) requièrent des informations d'identification pour le service cloud correspondant." -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "Les informations d'identification sont requises pour une source cloud." -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "Les identifiants de type machine, contrôle de la source, insights ou archivage sécurisé ne sont pas autorisés par les sources d'inventaire personnalisées." -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "Les identifiants de type insights ou archivage sécurisé ne sont pas autorisés pour les sources d'inventaire scm." -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "Région %(source)s non valide : %(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "Expression de filtre non valide : %(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "Choix de regroupement non valide : %(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." msgstr "Projet contenant le fichier d'inventaire utilisé comme source." -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." 
msgstr "On n'autorise pas plus d'une source d'inventaire basé SCM avec mise à jour pré-inventaire ou mise à jour projet." -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "Impossible de mettre à jour une source d'inventaire SCM lors du lancement si elle est définie pour se mettre à jour lors de l'actualisation du projet. À la place, configurez le projet source correspondant pour qu'il se mette à jour au moment du lancement." -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." msgstr "Impossible de définir chemin_source si pas du type SCM." -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "Les fichiers d'inventaire de cette mise à jour de projet ont été utilisés pour la mise à jour de l'inventaire." -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "Contenus des scripts d'inventaire" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "Organisation propriétaire de ce script d'inventaire." @@ -4012,28 +4012,28 @@ msgstr "Inventaire appliqué en tant qu'invite, en supposant que le modèle de t msgid "job host summaries" msgstr "récapitulatifs des hôtes pour la tâche" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "Supprimer les tâches plus anciennes qu'un certain nombre de jours" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "Supprimer les entrées du flux d'activité plus anciennes qu'un certain nombre de jours" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "Supprime les sessions de navigateur expirées dans la base de données" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "Supprime les jetons d'accès OAuth 2 et les jetons d’actualisation arrivés à expiration" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "Les variables {list_of_keys} ne sont pas autorisées pour les tâches système." -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "jours doit être un entier positif." @@ -4777,7 +4777,7 @@ msgstr "Aucun chemin de traitement des erreurs trouvé, flux de travail marqué msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." msgstr "Le nœud d'approbation {name} ({pk}) a expiré après {timeout} secondes." 
-#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "Environnement virtuel non valide sélectionné : {}" @@ -4814,53 +4814,53 @@ msgstr "Aucun chemin de traitement des erreurs pour le ou les nœuds de tâche d msgid "Unable to convert \"%s\" to boolean" msgstr "Impossible de convertir \"%s\" en booléen" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "Type de SCM \"%s\" non pris en charge" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "URL %s non valide" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "URL %s non prise en charge" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "Hôte \"%s\" non pris en charge pour le fichier ://URL" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "L'hôte est requis pour l'URL %s" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "Le nom d'utilisateur doit être \"git\" pour l'accès SSH à %s." -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." msgstr "Le nom d'utilisateur doit être \"hg\" pour l'accès SSH à %s." -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "Le type d'entrée ’{data_type}’ n'est pas un dictionnaire" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "Variables non compatibles avec la norme JSON (error : {json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/locale/ja/LC_MESSAGES/django.po b/awx/locale/ja/LC_MESSAGES/django.po index db72a7ec8f..a878a3d1ad 100644 --- a/awx/locale/ja/LC_MESSAGES/django.po +++ b/awx/locale/ja/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -528,7 +528,7 @@ msgstr "ジョブテンプレートインベントリーが見つからないか msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "不明です。ジョブは起動設定が保存される前に実行された可能性があります。" -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." 
msgstr "{} の使用はアドホックコマンドで禁止されています。" @@ -547,324 +547,324 @@ msgstr "指定された変数 {} には置き換えるデータベースの値 msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ は予約されたキーワードで、{} には使用できません。\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "ジョブを実行するにはプロジェクトが必要です。" -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." msgstr "プロジェクトの更新に失敗したため、実行するリビジョンがありません。" -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "このジョブテンプレートに関連付けられているインベントリーが削除されています。" -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "指定されたインベントリーが削除されています。" -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "複数の {} 認証情報を割り当てることができません。" -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "`{}`の種類の認証情報を割り当てることができません。" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." msgstr "置き換えなしで起動時に {} 認証情報を削除することはサポートされていません。指定された一覧には認証情報がありません: {}" -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "このワークフローに関連付けられているインベントリーが削除されています。" -#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "メッセージタイプ '{}' が無効です。'メッセージ' または 'ボディー' のいずれかに指定する必要があります。" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "'{}' の文字列が必要ですが、{} が見つかりました。 " -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "メッセージでは改行を追加できません ({} イベントに改行が含まれます)" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "'messages' フィールドには辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "イベント '{}' は無効です。'started'、'success'、'error' または 'workflow_approval' のいずれかでなければなりません。" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "イベント '{}' には辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "ワークフロー承認イベント '{}' が無効です。'running'、'approved'、'timed_out' または 'denied' のいずれかでなければなりません。" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "ワークフロー承認イベント '{}' には辞書が必要ですが、{} が見つかりました。" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "メッセージ '{}' のレンダリングができません: {}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "フィールド '{}' が利用できません" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 
msgid "Security error due to field '{}'" msgstr "フィールド '{}' が原因のセキュリティーエラー" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "'{}' の Webhook のボディーは json 辞書でなければなりません。'{}' のタイプが見つかりました。" -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." msgstr "'{}' の Webhook ボディーは有効な json 辞書ではありません ({})。" -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "通知設定の必須フィールドがありません: notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "フィールド '{}' に値が指定されていません" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." msgstr "HTTP メソッドは 'POST' または 'PUT' のいずれかでなければなりません。" -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "通知設定の必須フィールドがありません: {}。" -#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "設定フィールド '{}' のタイプが正しくありません。{} が予期されました。" -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "通知ボディー" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "有効な DTSTART が rrule で必要です。値は DTSTART:YYYYMMDDTHHMMSSZ で開始する必要があります。" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART をネイティブの日時にすることができません。;TZINFO= or YYYYMMDDTHHMMSSZZ を指定します。" -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "複数の DTSTART はサポートされません。" -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "RRULE が rrule で必要です。" -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "複数の RRULE はサポートされません。" -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "INTERVAL が rrule で必要です。" -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "SECONDLY はサポートされません。" -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "複数の BYMONTHDAY はサポートされません。" -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "複数の BYMONTH はサポートされません。" -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "数字の接頭辞のある BYDAY はサポートされません。" -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "BYYEARDAY はサポートされません。" -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "BYWEEKNO はサポートされません。" -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE には COUNT と UNTIL の両方を含めることができません" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." 
msgstr "COUNT > 999 はサポートされません。" -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "rrule の構文解析で検証に失敗しました: {}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "インベントリーソースはクラウドリソースでなければなりません。" -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "手動プロジェクトにはスケジュールを設定できません。" -#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." msgstr "「update_on_project_update」が設定されたインベントリーソースはスケジュールできません。代わりのそのソースプロジェクト「{}」 をスケジュールします。" -#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "このインスタンスにターゲット設定されている実行中または待機状態のジョブの数" -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "このインスタンスをターゲットに設定するすべてのジョブの数" -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "このインスタンスグループにターゲット設定されている実行中または待機状態のジョブの数" -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "このインスタンスグループをターゲットに設定するすべてのジョブの数" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "インスタンスグループが他のグループを制御するかどうかを指定します。" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "このグループ内でインスタンスを分離させるかを指定します。分離されたグループには指定したコントローラーグループがあります。" -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "このグループ内でインスタンスをコンテナー化するかを指定します。コンテナー化したグループには、指定の OpenShift または Kubernetes クラスターが含まれます。" -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "ポリシーインスタンスの割合" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "新規インスタンスがオンラインになると、このグループに自動的に最小限割り当てられるインスタンスの割合を選択します。" -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "ポリシーインスタンスの最小値" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "新規インスタンスがオンラインになると、このグループに自動的に最小限割り当てられるインスタンス数を入力します。" -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "ポリシーインスタンスの一覧" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "このグループに割り当てられる完全一致のインスタンスの一覧" -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "重複するエントリー {}。" -#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." 
msgstr "{} は既存インスタンスの有効なホスト名ではありません。" -#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "分離されたインスタンスは、API 経由でインスタンスグループから追加したり、削除したりすることができません。" -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "分離されたインスタンスグループのメンバーシップは API で管理できません。" -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "コンテナー化されたインスタンスは API で管理されないことがあります" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." msgstr "Tower のインスタンスグループ名は変更できません。" -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "インスタンスグループに関連付けることができる Kubernetes 認証情報のみです" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "これがある場合には、変更された関係またはロールのフィールド名を表示します。" -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "これがある場合には、ロールまたは関係が定義されているモデルを表示します。" -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "オブジェクトの作成、更新または削除時の新規値および変更された値の概要" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "作成、更新、および削除イベントの場合、これは影響を受けたオブジェクトタイプになります。関連付けおよび関連付け解除イベントの場合、これは object2 に関連付けられたか、またはその関連付けが解除されたオブジェクトタイプになります。" -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "作成、更新、および削除イベントの場合は設定されません。関連付けおよび関連付け解除イベントの場合、これは object1 が関連付けられるオブジェクトタイプになります。" -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "指定されたオブジェクトについて実行されたアクション。" @@ -1638,7 +1638,7 @@ msgstr "設定例" msgid "Example setting which can be different for each user." 
msgstr "ユーザーごとに異なる設定例" -#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "ユーザー" @@ -1741,15 +1741,15 @@ msgstr "システム" msgid "OtherSystem" msgstr "他のシステム" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "設定カテゴリー" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "設定の詳細" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "ロギング接続テスト" @@ -2794,7 +2794,7 @@ msgstr "Conjur URL" msgid "API Key" msgstr "API キー" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "アカウント" @@ -2881,7 +2881,7 @@ msgid "" msgstr "KV シークレットバックエンド名 (空白の場合は、シークレットパスの最初のセグメントが使用されます)。" #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "キー名" @@ -3258,7 +3258,7 @@ msgid "" "Management (IAM) users." msgstr "セキュリティートークンサービス (STS) は、AWS Identity and Access Management (IAM) ユーザーの一時的な、権限の制限された認証情報を要求できる web サービスです。" -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3298,7 +3298,7 @@ msgstr "OpenStack ドメインは管理上の境界を定義します。これ msgid "Verify SSL" msgstr "SSL の検証" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3311,7 +3311,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." msgstr "VMware vCenter に対応するホスト名または IP アドレスを入力します。" -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "Red Hat Satellite 6" @@ -3325,7 +3325,7 @@ msgid "" "example, https://satellite.example.org" msgstr "Red Hat Satellite 6 Server に対応する URL を入力します (例: https://satellite.example.org)。" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3339,7 +3339,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "CloudForms インスタンスに対応する仮想マシンの URL を入力します (例: https://cloudforms.example.org)。" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3368,7 +3368,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." 
msgstr "サービスアカウントメールに関連付けられた PEM ファイルの内容を貼り付けます。" -#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3406,7 +3406,7 @@ msgstr "GitLab パーソナルアクセストークン" msgid "This token needs to come from your profile settings in GitLab" msgstr "このトークンは GitLab のプロファイル設定から取得する必要があります。" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "Red Hat Virtualization" @@ -3422,7 +3422,7 @@ msgstr "CA ファイル" msgid "Absolute file path to the CA file to use (optional)" msgstr "使用する CA ファイルへの絶対ファイルパス (オプション)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3466,7 +3466,7 @@ msgstr "ソースは、外部の認証情報でなければなりません。" msgid "Input field must be defined on target credential (options are {})." msgstr "入力フィールドは、ターゲットの認証情報 (オプションは {}) で定義する必要があります。" -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "ホストの失敗" @@ -3474,7 +3474,7 @@ msgstr "ホストの失敗" msgid "Host Started" msgstr "ホストの開始" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "ホスト OK" @@ -3482,11 +3482,11 @@ msgstr "ホスト OK" msgid "Host Failure" msgstr "ホストの失敗" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "ホストがスキップされました" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "ホストに到達できません" @@ -3570,27 +3570,27 @@ msgstr "プレイの開始" msgid "Playbook Complete" msgstr "Playbook の完了" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "デバッグ" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "詳細" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "非推奨" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "警告" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "システム警告" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "エラー" @@ -3618,300 +3618,300 @@ msgid "" "this group" msgstr "このグループに常に自動的に割り当てられる完全一致のインスタンスの一覧" -#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "ホストにはこのインベントリーへの直接のリンクがあります。" -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." 
msgstr "host_filter プロパティーを使用して生成されたインベントリーのホスト。" -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "インベントリー" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "このインベントリーを含む組織。" -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "JSON または YAML 形式のインベントリー変数。" -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーのホストが失敗したかどうかを示すフラグ。" -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーでの合計ホスト数。" -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーで障害が発生中のホスト数。" -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーでの合計グループ数。" -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "このフィールドは非推奨で、今後のリリースで削除予定です。このインベントリーに外部のインベントリーソースがあるかどうかを示すフラグ。" -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "このインベントリー内で設定される外部インベントリーソースの合計数。" -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "エラーのあるこのインベントリー内の外部インベントリーソースの数。" -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "表示されているインベントリーの種類。" -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "このインべントリーのホストに適用されるフィルター。" -#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "Red Hat Insights API へのアクセス時にこのインベントリーに属するホストによって使用される認証情報。" -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "このインベントリーが削除されていることを示すフラグ。" -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "サブセットをスライスの詳細として解析できませんでした。" -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "スライス番号はスライスの合計数より小さくなければなりません。" -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." 
msgstr "スライス番号は 1 以上でなければなりません。" -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "割り当てはスマートインベントリーでは許可されません" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "認証情報の種類は「insights」である必要があります。" -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "このホストはオンラインで、ジョブを実行するために利用できますか?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "ホストを一意に識別するためにリモートインベントリーソースで使用される値" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "JSON または YAML 形式のホスト変数。" -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "このホストを作成または変更したインベントリーソース。" -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "ホスト別の最新 ansible_facts の任意の JSON 構造。" -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." msgstr "ansible_facts の最終変更日時。" -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "Red Hat Insights ホスト固有 ID。" -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "JSON または YAML 形式のグループ変数。" -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "このグループに直接関連付けられたホスト。" -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." msgstr "このグループを作成または変更したインベントリーソース。" -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "ファイル、ディレクトリーまたはスクリプト" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "ソース: プロジェクト" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "カスタムスクリプト" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "YAML または JSON 形式のインベントリーソース変数。" -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "カンマ区切りのフィルター式の一覧 (EC2 のみ) です。ホストは、フィルターのいずれかが一致する場合にインポートされます。" -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "インベントリーソースから自動的に作成されるグループを制限します (EC2 のみ)。" -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." 
msgstr "リモートインベントリーソースからのローカルグループおよびホストを上書きします。" -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "リモートインベントリーソースからのローカル変数を上書きします。" -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." msgstr "タスクが取り消される前の実行時間 (秒数)。" -#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "イメージ ID" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "アベイラビリティーゾーン" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "インスタンス ID" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "インスタンスの状態" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "プラットフォーム" -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "インスタンスタイプ" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "リージョン" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "セキュリティーグループ" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "タグ" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "タグ None" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "VPC ID" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "クラウドベースのインベントリーソース (%s など) には一致するクラウドサービスの認証情報が必要です。" -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "認証情報がクラウドソースに必要です。" -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "タイプがマシン、ソースコントロール、Insights および Vault の認証情報はカスタムインベントリーソースには許可されません。" -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "タイプが Insights および Vault の認証情報は SCM のインベントリーソースには許可されません。" -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "無効な %(source)s リージョン: %(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "無効なフィルター式: %(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "無効なグループ (選択による): %(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." 
msgstr "ソースとして使用されるインベントリーファイルが含まれるプロジェクト。" -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." msgstr "複数の SCM ベースのインベントリーソースについて、インベントリー別のプロジェクト更新時の更新は許可されません。" -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "プロジェクト更新時の更新に設定している場合、SCM ベースのインベントリーソースを更新できません。その代わりに起動時に更新するように対応するソースプロジェクトを設定します。" -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." msgstr "SCM タイプでない場合 source_path を設定できません。" -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "このプロジェクト更新のインベントリーファイルがインベントリー更新に使用されました。" -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "インベントリースクリプトの内容" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "このインベントリースクリプトを所有する組織" @@ -4010,28 +4010,28 @@ msgstr "インベントリーがプロンプトとして適用されると、ジ msgid "job host summaries" msgstr "ジョブホストの概要" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "特定の日数より前のジョブを削除" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "特定の日数より前のアクティビティーストリームのエントリーを削除" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "期限切れブラウザーセッションをデータベースから削除" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "期限切れの OAuth 2 アクセストークンを削除し、トークンを更新" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "システムジョブでは変数 {list_of_keys} を使用できません。" -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "日数は正の整数である必要があります。" @@ -4775,7 +4775,7 @@ msgstr "エラーの処理パスが見つかりません。ワークフローを msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." 
msgstr "承認ノード {name} ({pk}) は {timeout} 秒後に失効しました。" -#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "無効な仮想環境が選択されました: {}" @@ -4812,53 +4812,53 @@ msgstr "ワークフロージョブのノードにエラーハンドルパスが msgid "Unable to convert \"%s\" to boolean" msgstr "\"%s\" をブール値に変換できません" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "サポートされない SCM タイプ \"%s\"" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "無効な %s URL" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "サポートされない %s URL" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "file:// URL でサポートされないホスト \"%s\"" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "%s URL にはホストが必要です" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "%s への SSH アクセスではユーザー名を \"git\" にする必要があります。" -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." msgstr "%s への SSH アクセスではユーザー名を \"hg\" にする必要があります。" -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "入力タイプ `{data_type}` は辞書ではありません" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "変数には JSON 標準との互換性がありません (エラー: {json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/locale/zh/LC_MESSAGES/django.po b/awx/locale/zh/LC_MESSAGES/django.po index b615e5d02b..7827966e11 100644 --- a/awx/locale/zh/LC_MESSAGES/django.po +++ b/awx/locale/zh/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-04-27 13:55+0000\n" +"POT-Creation-Date: 2020-05-28 21:45+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -527,7 +527,7 @@ msgstr "作业模板清单缺失或未定义。" msgid "Unknown, job may have been ran before launch configurations were saved." msgstr "未知,在保存启动配置前作业可能已经运行。" -#: awx/api/serializers.py:3252 awx/main/tasks.py:2795 awx/main/tasks.py:2813 +#: awx/api/serializers.py:3252 awx/main/tasks.py:2800 awx/main/tasks.py:2818 msgid "{} are prohibited from use in ad hoc commands." msgstr "{} 被禁止在临时命令中使用。" @@ -546,324 +546,324 @@ msgstr "提供的变量 {} 没有要替换的数据库值。" msgid "\"$encrypted$ is a reserved keyword, may not be used for {}.\"" msgstr "\"$encrypted$ 是一个保留的关键字,可能不能用于 {}\"" -#: awx/api/serializers.py:4070 +#: awx/api/serializers.py:4078 msgid "A project is required to run a job." msgstr "运行一个作业时需要一个项目。" -#: awx/api/serializers.py:4072 +#: awx/api/serializers.py:4080 msgid "Missing a revision to run due to failed project update." 
msgstr "由于项目更新失败,缺少运行的修订版本。" -#: awx/api/serializers.py:4076 +#: awx/api/serializers.py:4084 msgid "The inventory associated with this Job Template is being deleted." msgstr "与此作业模板关联的清单将被删除。" -#: awx/api/serializers.py:4078 awx/api/serializers.py:4194 +#: awx/api/serializers.py:4086 awx/api/serializers.py:4202 msgid "The provided inventory is being deleted." msgstr "提供的清单将被删除。" -#: awx/api/serializers.py:4086 +#: awx/api/serializers.py:4094 msgid "Cannot assign multiple {} credentials." msgstr "无法分配多个 {} 凭证。" -#: awx/api/serializers.py:4090 +#: awx/api/serializers.py:4098 msgid "Cannot assign a Credential of kind `{}`" msgstr "无法分配类型为 `{}` 的凭证" -#: awx/api/serializers.py:4103 +#: awx/api/serializers.py:4111 msgid "" "Removing {} credential at launch time without replacement is not supported. " "Provided list lacked credential(s): {}." msgstr "不支持在不替换的情况下在启动时删除 {} 凭证。提供的列表缺少凭证:{}。" -#: awx/api/serializers.py:4192 +#: awx/api/serializers.py:4200 msgid "The inventory associated with this Workflow is being deleted." msgstr "与此 Workflow 关联的清单将被删除。" -#: awx/api/serializers.py:4263 +#: awx/api/serializers.py:4271 msgid "Message type '{}' invalid, must be either 'message' or 'body'" msgstr "消息类型 '{}' 无效,必须是 'message' 或 'body'" -#: awx/api/serializers.py:4269 +#: awx/api/serializers.py:4277 msgid "Expected string for '{}', found {}, " msgstr "'{}' 的预期字符串,找到 {}," -#: awx/api/serializers.py:4273 +#: awx/api/serializers.py:4281 msgid "Messages cannot contain newlines (found newline in {} event)" msgstr "消息不能包含新行(在 {} 事件中找到新行)" -#: awx/api/serializers.py:4279 +#: awx/api/serializers.py:4287 msgid "Expected dict for 'messages' field, found {}" msgstr "'messages' 字段的预期字典,找到 {}" -#: awx/api/serializers.py:4283 +#: awx/api/serializers.py:4291 msgid "" "Event '{}' invalid, must be one of 'started', 'success', 'error', or " "'workflow_approval'" msgstr "事件 '{}' 无效,必须是 'started'、'success'、'error' 或 'workflow_approval' 之一" -#: awx/api/serializers.py:4289 +#: awx/api/serializers.py:4297 msgid "Expected dict for event '{}', found {}" msgstr "事件 '{}' 的预期字典,找到 {}" -#: awx/api/serializers.py:4294 +#: awx/api/serializers.py:4302 msgid "" "Workflow Approval event '{}' invalid, must be one of 'running', 'approved', " "'timed_out', or 'denied'" msgstr "工作流批准事件 '{}' 无效,必须是 'running'、'approved'、'timed_out' 或 'denied' 之一。" -#: awx/api/serializers.py:4301 +#: awx/api/serializers.py:4309 msgid "Expected dict for workflow approval event '{}', found {}" msgstr "工作流批准事件 '{}' 的预期字典,找到 {}" -#: awx/api/serializers.py:4328 +#: awx/api/serializers.py:4336 msgid "Unable to render message '{}': {}" msgstr "无法呈现消息 '{}':{}" -#: awx/api/serializers.py:4330 +#: awx/api/serializers.py:4338 msgid "Field '{}' unavailable" msgstr "字段 '{}' 不可用" -#: awx/api/serializers.py:4332 +#: awx/api/serializers.py:4340 msgid "Security error due to field '{}'" msgstr "因为字段 '{}' 导致安全错误" -#: awx/api/serializers.py:4352 +#: awx/api/serializers.py:4360 msgid "Webhook body for '{}' should be a json dictionary. Found type '{}'." msgstr "'{}' 的 Webhook 正文应该是 json 字典。找到类型 '{}'。" -#: awx/api/serializers.py:4355 +#: awx/api/serializers.py:4363 msgid "Webhook body for '{}' is not a valid json dictionary ({})." 
msgstr "'{}' 的 Webhook 正文不是有效的 json 字典 ({})。" -#: awx/api/serializers.py:4373 +#: awx/api/serializers.py:4381 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "通知配置缺少所需字段:notification_type" -#: awx/api/serializers.py:4400 +#: awx/api/serializers.py:4408 msgid "No values specified for field '{}'" msgstr "没有为字段 '{}' 指定值" -#: awx/api/serializers.py:4405 +#: awx/api/serializers.py:4413 msgid "HTTP method must be either 'POST' or 'PUT'." msgstr "HTTP 方法必须是 'POST' 或 'PUT'。" -#: awx/api/serializers.py:4407 +#: awx/api/serializers.py:4415 msgid "Missing required fields for Notification Configuration: {}." msgstr "通知配置缺少所需字段:{}。" -#: awx/api/serializers.py:4410 +#: awx/api/serializers.py:4418 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "配置字段 '{}' 类型错误,预期为 {}。" -#: awx/api/serializers.py:4427 +#: awx/api/serializers.py:4435 msgid "Notification body" msgstr "通知正文" -#: awx/api/serializers.py:4507 +#: awx/api/serializers.py:4515 msgid "" "Valid DTSTART required in rrule. Value should start with: DTSTART:" "YYYYMMDDTHHMMSSZ" msgstr "rrule 中需要有效的 DTSTART。值应该以 DTSTART:YYYMMDDTHHMMSSZ 开头" -#: awx/api/serializers.py:4509 +#: awx/api/serializers.py:4517 msgid "" "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "DTSTART 不能是一个不带时区的日期时间。指定 ;TZINFO= 或 YYYMMDDTHHMMSSZ。" -#: awx/api/serializers.py:4511 +#: awx/api/serializers.py:4519 msgid "Multiple DTSTART is not supported." msgstr "不支持多个 DTSTART。" -#: awx/api/serializers.py:4513 +#: awx/api/serializers.py:4521 msgid "RRULE required in rrule." msgstr "rrule 中需要 RRULE。" -#: awx/api/serializers.py:4515 +#: awx/api/serializers.py:4523 msgid "Multiple RRULE is not supported." msgstr "不支持多个 RRULE。" -#: awx/api/serializers.py:4517 +#: awx/api/serializers.py:4525 msgid "INTERVAL required in rrule." msgstr "rrule 需要 INTERVAL。" -#: awx/api/serializers.py:4519 +#: awx/api/serializers.py:4527 msgid "SECONDLY is not supported." msgstr "不支持 SECONDLY。" -#: awx/api/serializers.py:4521 +#: awx/api/serializers.py:4529 msgid "Multiple BYMONTHDAYs not supported." msgstr "不支持多个 BYMONTHDAY。" -#: awx/api/serializers.py:4523 +#: awx/api/serializers.py:4531 msgid "Multiple BYMONTHs not supported." msgstr "不支持多个 BYMONTH。" -#: awx/api/serializers.py:4525 +#: awx/api/serializers.py:4533 msgid "BYDAY with numeric prefix not supported." msgstr "不支持带有数字前缀的 BYDAY。" -#: awx/api/serializers.py:4527 +#: awx/api/serializers.py:4535 msgid "BYYEARDAY not supported." msgstr "不支持 BYYEARDAY。" -#: awx/api/serializers.py:4529 +#: awx/api/serializers.py:4537 msgid "BYWEEKNO not supported." msgstr "不支持 BYWEEKNO。" -#: awx/api/serializers.py:4531 +#: awx/api/serializers.py:4539 msgid "RRULE may not contain both COUNT and UNTIL" msgstr "RRULE 可能不包含 COUNT 和 UNTIL" -#: awx/api/serializers.py:4535 +#: awx/api/serializers.py:4543 msgid "COUNT > 999 is unsupported." msgstr "不支持 COUNT > 999。" -#: awx/api/serializers.py:4541 +#: awx/api/serializers.py:4549 msgid "rrule parsing failed validation: {}" msgstr "rrule 解析失败验证:{}" -#: awx/api/serializers.py:4603 +#: awx/api/serializers.py:4611 msgid "Inventory Source must be a cloud resource." msgstr "清单源必须是云资源。" -#: awx/api/serializers.py:4605 +#: awx/api/serializers.py:4613 msgid "Manual Project cannot have a schedule set." msgstr "手动项目不能有计划集。" -#: awx/api/serializers.py:4608 +#: awx/api/serializers.py:4616 msgid "" "Inventory sources with `update_on_project_update` cannot be scheduled. " "Schedule its source project `{}` instead." 
msgstr "无法调度带有 `update_on_project_update` 的清单源。改为调度其源项目 `{}`。" -#: awx/api/serializers.py:4618 +#: awx/api/serializers.py:4626 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance" msgstr "处于运行状态或等待状态的针对此实例的作业计数" -#: awx/api/serializers.py:4623 +#: awx/api/serializers.py:4631 msgid "Count of all jobs that target this instance" msgstr "所有针对此实例的作业计数" -#: awx/api/serializers.py:4656 +#: awx/api/serializers.py:4664 msgid "" "Count of jobs in the running or waiting state that are targeted for this " "instance group" msgstr "处于运行状态或等待状态的针对此实例组的作业计数" -#: awx/api/serializers.py:4661 +#: awx/api/serializers.py:4669 msgid "Count of all jobs that target this instance group" msgstr "所有针对此实例组的作业计数" -#: awx/api/serializers.py:4666 +#: awx/api/serializers.py:4674 msgid "Indicates whether instance group controls any other group" msgstr "指明实例组是否控制任何其他组" -#: awx/api/serializers.py:4670 +#: awx/api/serializers.py:4678 msgid "" "Indicates whether instances in this group are isolated.Isolated groups have " "a designated controller group." msgstr "指明此组中的实例是否被隔离。隔离的组具有指定的控制器组。" -#: awx/api/serializers.py:4675 +#: awx/api/serializers.py:4683 msgid "" "Indicates whether instances in this group are containerized.Containerized " "groups have a designated Openshift or Kubernetes cluster." msgstr "指明此组中的实例是否容器化。容器化的组具有指定的 Openshift 或 Kubernetes 集群。" -#: awx/api/serializers.py:4683 +#: awx/api/serializers.py:4691 msgid "Policy Instance Percentage" msgstr "策略实例百分比" -#: awx/api/serializers.py:4684 +#: awx/api/serializers.py:4692 msgid "" "Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online." msgstr "新实例上线时将自动分配给此组的所有实例的最小百分比。" -#: awx/api/serializers.py:4689 +#: awx/api/serializers.py:4697 msgid "Policy Instance Minimum" msgstr "策略实例最小值" -#: awx/api/serializers.py:4690 +#: awx/api/serializers.py:4698 msgid "" "Static minimum number of Instances that will be automatically assign to this " "group when new instances come online." msgstr "新实例上线时自动分配给此组的静态最小实例数量。" -#: awx/api/serializers.py:4695 +#: awx/api/serializers.py:4703 msgid "Policy Instance List" msgstr "策略实例列表" -#: awx/api/serializers.py:4696 +#: awx/api/serializers.py:4704 msgid "List of exact-match Instances that will be assigned to this group" msgstr "将分配给此组的完全匹配实例的列表" -#: awx/api/serializers.py:4722 +#: awx/api/serializers.py:4730 msgid "Duplicate entry {}." msgstr "重复条目 {}。" -#: awx/api/serializers.py:4724 +#: awx/api/serializers.py:4732 msgid "{} is not a valid hostname of an existing instance." msgstr "{} 不是现有实例的有效主机名。" -#: awx/api/serializers.py:4726 awx/api/views/mixin.py:98 +#: awx/api/serializers.py:4734 awx/api/views/mixin.py:98 msgid "" "Isolated instances may not be added or removed from instances groups via the " "API." msgstr "可能无法通过 API 为实例组添加或删除隔离的实例。" -#: awx/api/serializers.py:4728 awx/api/views/mixin.py:102 +#: awx/api/serializers.py:4736 awx/api/views/mixin.py:102 msgid "Isolated instance group membership may not be managed via the API." msgstr "可能无法通过 API 管理隔离的实例组成员资格。" -#: awx/api/serializers.py:4730 awx/api/serializers.py:4735 -#: awx/api/serializers.py:4740 +#: awx/api/serializers.py:4738 awx/api/serializers.py:4743 +#: awx/api/serializers.py:4748 msgid "Containerized instances may not be managed via the API" msgstr "可能无法通过 API 管理容器化实例" -#: awx/api/serializers.py:4745 +#: awx/api/serializers.py:4753 msgid "tower instance group name may not be changed." 
msgstr "可能不会更改 tower 实例组名称。" -#: awx/api/serializers.py:4750 +#: awx/api/serializers.py:4758 msgid "Only Kubernetes credentials can be associated with an Instance Group" msgstr "只有 Kubernetes 凭证可以与实例组关联" -#: awx/api/serializers.py:4789 +#: awx/api/serializers.py:4797 msgid "" "When present, shows the field name of the role or relationship that changed." msgstr "存在时,显示更改的角色或关系的字段名称。" -#: awx/api/serializers.py:4791 +#: awx/api/serializers.py:4799 msgid "" "When present, shows the model on which the role or relationship was defined." msgstr "存在时,显示定义角色或关系的模型。" -#: awx/api/serializers.py:4824 +#: awx/api/serializers.py:4832 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "创建、更新或删除对象时新值和更改值的概述" -#: awx/api/serializers.py:4826 +#: awx/api/serializers.py:4834 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "对于创建、更新和删除事件,这是受影响的对象类型。对于关联和解除关联事件,这是与对象 2 关联或解除关联的对象类型。" -#: awx/api/serializers.py:4829 +#: awx/api/serializers.py:4837 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "创建、更新和删除事件未填充。对于关联和解除关联事件,这是对象 1 要关联的对象类型。" -#: awx/api/serializers.py:4832 +#: awx/api/serializers.py:4840 msgid "The action taken with respect to the given object(s)." msgstr "对给定对象执行的操作。" @@ -1637,7 +1637,7 @@ msgstr "设置示例" msgid "Example setting which can be different for each user." msgstr "每个用户之间可以各不相同的设置示例。" -#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:55 +#: awx/conf/conf.py:92 awx/conf/registry.py:81 awx/conf/views.py:56 msgid "User" msgstr "用户" @@ -1740,15 +1740,15 @@ msgstr "系统" msgid "OtherSystem" msgstr "OtherSystem" -#: awx/conf/views.py:47 +#: awx/conf/views.py:48 msgid "Setting Categories" msgstr "设置类别" -#: awx/conf/views.py:69 +#: awx/conf/views.py:70 msgid "Setting Detail" msgstr "设置详情" -#: awx/conf/views.py:160 +#: awx/conf/views.py:162 msgid "Logging Connectivity Test" msgstr "日志记录连接测试" @@ -2793,7 +2793,7 @@ msgstr "Conjur URL" msgid "API Key" msgstr "API 密钥" -#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1017 +#: awx/main/credential_plugins/conjur.py:28 awx/main/models/inventory.py:1018 msgid "Account" msgstr "帐户" @@ -2880,7 +2880,7 @@ msgid "" msgstr "kv 机密后端的名称(如果留空,将使用机密路径的第一个分段)。" #: awx/main/credential_plugins/hashivault.py:60 -#: awx/main/models/inventory.py:1022 +#: awx/main/models/inventory.py:1023 msgid "Key Name" msgstr "密钥名称" @@ -3257,7 +3257,7 @@ msgid "" "Management (IAM) users." msgstr "安全令牌服务 (STS) 是一个 Web 服务,让您可以为 AWS 身份和访问管理 (IAM) 用户请求临时的有限权限凭证。" -#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:832 +#: awx/main/models/credential/__init__.py:780 awx/main/models/inventory.py:833 msgid "OpenStack" msgstr "OpenStack" @@ -3297,7 +3297,7 @@ msgstr "OpenStack 域定义了管理边界。只有 Keystone v3 身份验证 URL msgid "Verify SSL" msgstr "验证 SSL" -#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:829 +#: awx/main/models/credential/__init__.py:823 awx/main/models/inventory.py:830 msgid "VMware vCenter" msgstr "VMware vCenter" @@ -3310,7 +3310,7 @@ msgid "" "Enter the hostname or IP address that corresponds to your VMware vCenter." 
msgstr "输入与 VMware vCenter 对应的主机名或 IP 地址。" -#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:830 +#: awx/main/models/credential/__init__.py:849 awx/main/models/inventory.py:831 msgid "Red Hat Satellite 6" msgstr "红帽卫星 6" @@ -3324,7 +3324,7 @@ msgid "" "example, https://satellite.example.org" msgstr "输入与您的红帽卫星 6 服务器对应的 URL。例如:https://satellite.example.org" -#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:831 +#: awx/main/models/credential/__init__.py:875 awx/main/models/inventory.py:832 msgid "Red Hat CloudForms" msgstr "Red Hat CloudForms" @@ -3338,7 +3338,7 @@ msgid "" "instance. For example, https://cloudforms.example.org" msgstr "输入与您的 CloudForms 实例对应的虚拟机的 URL。例如:https://cloudforms.example.org" -#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:827 +#: awx/main/models/credential/__init__.py:902 awx/main/models/inventory.py:828 msgid "Google Compute Engine" msgstr "Google Compute Engine" @@ -3367,7 +3367,7 @@ msgid "" "Paste the contents of the PEM file associated with the service account email." msgstr "粘贴与服务账户电子邮件关联的 PEM 文件的内容。" -#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:828 +#: awx/main/models/credential/__init__.py:936 awx/main/models/inventory.py:829 msgid "Microsoft Azure Resource Manager" msgstr "Microsoft Azure Resource Manager" @@ -3405,7 +3405,7 @@ msgstr "GitLab 个人访问令牌" msgid "This token needs to come from your profile settings in GitLab" msgstr "此令牌需要来自您在 GitLab 中的配置文件设置" -#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:833 +#: awx/main/models/credential/__init__.py:1041 awx/main/models/inventory.py:834 msgid "Red Hat Virtualization" msgstr "红帽虚拟化" @@ -3421,7 +3421,7 @@ msgstr "CA 文件" msgid "Absolute file path to the CA file to use (optional)" msgstr "要使用的 CA 文件的绝对文件路径(可选)" -#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:834 +#: awx/main/models/credential/__init__.py:1091 awx/main/models/inventory.py:835 msgid "Ansible Tower" msgstr "Ansible Tower" @@ -3465,7 +3465,7 @@ msgstr "源必须是外部凭证" msgid "Input field must be defined on target credential (options are {})." 
msgstr "输入字段必须在目标凭证上定义(选项为 {})。" -#: awx/main/models/events.py:152 awx/main/models/events.py:655 +#: awx/main/models/events.py:152 awx/main/models/events.py:674 msgid "Host Failed" msgstr "主机故障" @@ -3473,7 +3473,7 @@ msgstr "主机故障" msgid "Host Started" msgstr "主机已启动" -#: awx/main/models/events.py:154 awx/main/models/events.py:656 +#: awx/main/models/events.py:154 awx/main/models/events.py:675 msgid "Host OK" msgstr "主机正常" @@ -3481,11 +3481,11 @@ msgstr "主机正常" msgid "Host Failure" msgstr "主机故障" -#: awx/main/models/events.py:156 awx/main/models/events.py:662 +#: awx/main/models/events.py:156 awx/main/models/events.py:681 msgid "Host Skipped" msgstr "主机已跳过" -#: awx/main/models/events.py:157 awx/main/models/events.py:657 +#: awx/main/models/events.py:157 awx/main/models/events.py:676 msgid "Host Unreachable" msgstr "主机无法访问" @@ -3569,27 +3569,27 @@ msgstr "Play 已启动" msgid "Playbook Complete" msgstr "Playbook 完成" -#: awx/main/models/events.py:184 awx/main/models/events.py:672 +#: awx/main/models/events.py:184 awx/main/models/events.py:691 msgid "Debug" msgstr "调试" -#: awx/main/models/events.py:185 awx/main/models/events.py:673 +#: awx/main/models/events.py:185 awx/main/models/events.py:692 msgid "Verbose" msgstr "详细" -#: awx/main/models/events.py:186 awx/main/models/events.py:674 +#: awx/main/models/events.py:186 awx/main/models/events.py:693 msgid "Deprecated" msgstr "已弃用" -#: awx/main/models/events.py:187 awx/main/models/events.py:675 +#: awx/main/models/events.py:187 awx/main/models/events.py:694 msgid "Warning" msgstr "警告" -#: awx/main/models/events.py:188 awx/main/models/events.py:676 +#: awx/main/models/events.py:188 awx/main/models/events.py:695 msgid "System Warning" msgstr "系统警告" -#: awx/main/models/events.py:189 awx/main/models/events.py:677 +#: awx/main/models/events.py:189 awx/main/models/events.py:696 #: awx/main/models/unified_jobs.py:75 msgid "Error" msgstr "错误" @@ -3617,300 +3617,300 @@ msgid "" "this group" msgstr "将始终自动分配给此组的完全匹配实例的列表" -#: awx/main/models/inventory.py:79 +#: awx/main/models/inventory.py:80 msgid "Hosts have a direct link to this inventory." msgstr "主机具有指向此清单的直接链接。" -#: awx/main/models/inventory.py:80 +#: awx/main/models/inventory.py:81 msgid "Hosts for inventory generated using the host_filter property." msgstr "使用 host_filter 属性生成的清单的主机。" -#: awx/main/models/inventory.py:85 +#: awx/main/models/inventory.py:86 msgid "inventories" msgstr "清单" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:93 msgid "Organization containing this inventory." msgstr "包含此清单的机构。" -#: awx/main/models/inventory.py:99 +#: awx/main/models/inventory.py:100 msgid "Inventory variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的清单变量。" -#: awx/main/models/inventory.py:104 +#: awx/main/models/inventory.py:105 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether any hosts in this inventory have failed." msgstr "此字段已弃用,并将在以后的发行版本中删除。指示此清单中是否有任何主机故障的标记。" -#: awx/main/models/inventory.py:110 +#: awx/main/models/inventory.py:111 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of hosts in this inventory." msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中的主机总数。" -#: awx/main/models/inventory.py:116 +#: awx/main/models/inventory.py:117 msgid "" "This field is deprecated and will be removed in a future release. Number of " "hosts in this inventory with active failures." 
msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中有活跃故障的主机数量。" -#: awx/main/models/inventory.py:122 +#: awx/main/models/inventory.py:123 msgid "" "This field is deprecated and will be removed in a future release. Total " "number of groups in this inventory." msgstr "此字段已弃用,并将在以后的发行版本中删除。此清单中的总组数。" -#: awx/main/models/inventory.py:128 +#: awx/main/models/inventory.py:129 msgid "" "This field is deprecated and will be removed in a future release. Flag " "indicating whether this inventory has any external inventory sources." msgstr "此字段已弃用,并将在以后的发行版本中删除。表示此清单是否有任何外部清单源的标记。" -#: awx/main/models/inventory.py:134 +#: awx/main/models/inventory.py:135 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "在此清单中配置的外部清单源总数。" -#: awx/main/models/inventory.py:139 +#: awx/main/models/inventory.py:140 msgid "Number of external inventory sources in this inventory with failures." msgstr "此清单中有故障的外部清单源数量。" -#: awx/main/models/inventory.py:146 +#: awx/main/models/inventory.py:147 msgid "Kind of inventory being represented." msgstr "所代表的清单种类。" -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:153 msgid "Filter that will be applied to the hosts of this inventory." msgstr "将应用到此清单的主机的过滤器。" -#: awx/main/models/inventory.py:180 +#: awx/main/models/inventory.py:181 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "访问红帽 Insights API 时供属于此清单的主机使用的凭证。" -#: awx/main/models/inventory.py:189 +#: awx/main/models/inventory.py:190 msgid "Flag indicating the inventory is being deleted." msgstr "指示正在删除清单的标记。" -#: awx/main/models/inventory.py:244 +#: awx/main/models/inventory.py:245 msgid "Could not parse subset as slice specification." msgstr "无法将子集作为分片规格来解析。" -#: awx/main/models/inventory.py:248 +#: awx/main/models/inventory.py:249 msgid "Slice number must be less than total number of slices." msgstr "分片数量必须小于分片总数。" -#: awx/main/models/inventory.py:250 +#: awx/main/models/inventory.py:251 msgid "Slice number must be 1 or higher." msgstr "分片数量必须为 1 或更高。" -#: awx/main/models/inventory.py:387 +#: awx/main/models/inventory.py:388 msgid "Assignment not allowed for Smart Inventory" msgstr "智能清单不允许分配" -#: awx/main/models/inventory.py:389 awx/main/models/projects.py:166 +#: awx/main/models/inventory.py:390 awx/main/models/projects.py:166 msgid "Credential kind must be 'insights'." msgstr "凭证种类必须是 'inights'。" -#: awx/main/models/inventory.py:474 +#: awx/main/models/inventory.py:475 msgid "Is this host online and available for running jobs?" msgstr "此主机是否在线,并可用于运行作业?" -#: awx/main/models/inventory.py:480 +#: awx/main/models/inventory.py:481 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "远程清单源用来唯一标识主机的值" -#: awx/main/models/inventory.py:485 +#: awx/main/models/inventory.py:486 msgid "Host variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的主机变量。" -#: awx/main/models/inventory.py:508 +#: awx/main/models/inventory.py:509 msgid "Inventory source(s) that created or modified this host." msgstr "创建或修改此主机的清单源。" -#: awx/main/models/inventory.py:513 +#: awx/main/models/inventory.py:514 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "每个主机最近的 ansible_facts 的任意 JSON 结构。" -#: awx/main/models/inventory.py:519 +#: awx/main/models/inventory.py:520 msgid "The date and time ansible_facts was last modified." 
msgstr "最后修改 ansible_facts 的日期和时间。" -#: awx/main/models/inventory.py:526 +#: awx/main/models/inventory.py:527 msgid "Red Hat Insights host unique identifier." msgstr "红帽 Insights 主机唯一标识符。" -#: awx/main/models/inventory.py:640 +#: awx/main/models/inventory.py:641 msgid "Group variables in JSON or YAML format." msgstr "JSON 或 YAML 格式的组变量。" -#: awx/main/models/inventory.py:646 +#: awx/main/models/inventory.py:647 msgid "Hosts associated directly with this group." msgstr "与此组直接关联的主机。" -#: awx/main/models/inventory.py:652 +#: awx/main/models/inventory.py:653 msgid "Inventory source(s) that created or modified this group." msgstr "创建或修改此组的清单源。" -#: awx/main/models/inventory.py:824 +#: awx/main/models/inventory.py:825 msgid "File, Directory or Script" msgstr "文件、目录或脚本" -#: awx/main/models/inventory.py:825 +#: awx/main/models/inventory.py:826 msgid "Sourced from a Project" msgstr "源于项目" -#: awx/main/models/inventory.py:826 +#: awx/main/models/inventory.py:827 msgid "Amazon EC2" msgstr "Amazon EC2" -#: awx/main/models/inventory.py:835 +#: awx/main/models/inventory.py:836 msgid "Custom Script" msgstr "自定义脚本" -#: awx/main/models/inventory.py:952 +#: awx/main/models/inventory.py:953 msgid "Inventory source variables in YAML or JSON format." msgstr "YAML 或 JSON 格式的清单源变量。" -#: awx/main/models/inventory.py:963 +#: awx/main/models/inventory.py:964 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "以逗号分隔的过滤器表达式列表(仅限 EC2)。当任何过滤器匹配时会导入主机。" -#: awx/main/models/inventory.py:969 +#: awx/main/models/inventory.py:970 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "限制从清单源自动创建的组(仅限 EC2)。" -#: awx/main/models/inventory.py:973 +#: awx/main/models/inventory.py:974 msgid "Overwrite local groups and hosts from remote inventory source." msgstr "从远程清单源覆盖本地组和主机。" -#: awx/main/models/inventory.py:977 +#: awx/main/models/inventory.py:978 msgid "Overwrite local variables from remote inventory source." msgstr "从远程清单源覆盖本地变量。" -#: awx/main/models/inventory.py:982 awx/main/models/jobs.py:154 +#: awx/main/models/inventory.py:983 awx/main/models/jobs.py:154 #: awx/main/models/projects.py:135 msgid "The amount of time (in seconds) to run before the task is canceled." 
msgstr "取消任务前运行的时间(以秒为单位)。" -#: awx/main/models/inventory.py:1015 +#: awx/main/models/inventory.py:1016 msgid "Image ID" msgstr "镜像 ID" -#: awx/main/models/inventory.py:1016 +#: awx/main/models/inventory.py:1017 msgid "Availability Zone" msgstr "可用性区域" -#: awx/main/models/inventory.py:1018 +#: awx/main/models/inventory.py:1019 msgid "Instance ID" msgstr "实例 ID" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1020 msgid "Instance State" msgstr "实例状态" -#: awx/main/models/inventory.py:1020 +#: awx/main/models/inventory.py:1021 msgid "Platform" msgstr "平台" -#: awx/main/models/inventory.py:1021 +#: awx/main/models/inventory.py:1022 msgid "Instance Type" msgstr "实例类型" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1024 msgid "Region" msgstr "区域" -#: awx/main/models/inventory.py:1024 +#: awx/main/models/inventory.py:1025 msgid "Security Group" msgstr "安全组" -#: awx/main/models/inventory.py:1025 +#: awx/main/models/inventory.py:1026 msgid "Tags" msgstr "标签" -#: awx/main/models/inventory.py:1026 +#: awx/main/models/inventory.py:1027 msgid "Tag None" msgstr "标签 None" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1028 msgid "VPC ID" msgstr "VPC ID" -#: awx/main/models/inventory.py:1095 +#: awx/main/models/inventory.py:1096 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "基于云的清单源(如 %s)需要匹配的云服务的凭证。" -#: awx/main/models/inventory.py:1101 +#: awx/main/models/inventory.py:1102 msgid "Credential is required for a cloud source." msgstr "云源需要凭证。" -#: awx/main/models/inventory.py:1104 +#: awx/main/models/inventory.py:1105 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "对于自定义清单源,不允许使用机器、源控制、insights 和 vault 类型的凭证。" -#: awx/main/models/inventory.py:1109 +#: awx/main/models/inventory.py:1110 msgid "" "Credentials of type insights and vault are disallowed for scm inventory " "sources." msgstr "对于 scm 清单源,不允许使用 insights 和 vault 类型的凭证。" -#: awx/main/models/inventory.py:1169 +#: awx/main/models/inventory.py:1170 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "无效的 %(source)s 区域:%(region)s" -#: awx/main/models/inventory.py:1193 +#: awx/main/models/inventory.py:1194 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "无效的过滤器表达式:%(filter)s" -#: awx/main/models/inventory.py:1214 +#: awx/main/models/inventory.py:1215 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "选择的组无效:%(choice)s" -#: awx/main/models/inventory.py:1242 +#: awx/main/models/inventory.py:1243 msgid "Project containing inventory file used as source." msgstr "包含用作源的清单文件的项目。" -#: awx/main/models/inventory.py:1415 +#: awx/main/models/inventory.py:1416 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." msgstr "不允许多个基于 SCM 的清单源按清单在项目更新时更新。" -#: awx/main/models/inventory.py:1422 +#: awx/main/models/inventory.py:1423 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "如果设置为在项目更新时更新,则无法在启动时更新基于 SCM 的清单源。应将对应的源项目配置为在启动时更新。" -#: awx/main/models/inventory.py:1428 +#: awx/main/models/inventory.py:1429 msgid "Cannot set source_path if not SCM type." 
msgstr "如果不是 SCM 类型,则无法设置 source_path。" -#: awx/main/models/inventory.py:1471 +#: awx/main/models/inventory.py:1472 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "此项目更新中的清单文件用于清单更新。" -#: awx/main/models/inventory.py:1582 +#: awx/main/models/inventory.py:1583 msgid "Inventory script contents" msgstr "清单脚本内容" -#: awx/main/models/inventory.py:1587 +#: awx/main/models/inventory.py:1588 msgid "Organization owning this inventory script" msgstr "拥有此清单脚本的机构" @@ -4009,28 +4009,28 @@ msgstr "作为提示而应用的清单,假定作业模板提示提供清单" msgid "job host summaries" msgstr "作业主机摘要" -#: awx/main/models/jobs.py:1158 +#: awx/main/models/jobs.py:1144 msgid "Remove jobs older than a certain number of days" msgstr "删除超过特定天数的作业" -#: awx/main/models/jobs.py:1159 +#: awx/main/models/jobs.py:1145 msgid "Remove activity stream entries older than a certain number of days" msgstr "删除比特定天数旧的活动流条目" -#: awx/main/models/jobs.py:1160 +#: awx/main/models/jobs.py:1146 msgid "Removes expired browser sessions from the database" msgstr "从数据库中删除已过期的浏览器会话" -#: awx/main/models/jobs.py:1161 +#: awx/main/models/jobs.py:1147 msgid "Removes expired OAuth 2 access tokens and refresh tokens" msgstr "删除已过期的 OAuth 2 访问令牌并刷新令牌" -#: awx/main/models/jobs.py:1231 +#: awx/main/models/jobs.py:1217 #, python-brace-format msgid "Variables {list_of_keys} are not allowed for system jobs." msgstr "系统作业不允许使用变量 {list_of_keys}。" -#: awx/main/models/jobs.py:1247 +#: awx/main/models/jobs.py:1233 msgid "days must be a positive integer." msgstr "天必须为正整数。" @@ -4774,7 +4774,7 @@ msgstr "未找到错误处理路径,将工作流标记为失败" msgid "The approval node {name} ({pk}) has expired after {timeout} seconds." msgstr "批准节点 {name} ({pk}) 已在 {timeout} 秒后过期。" -#: awx/main/tasks.py:1053 +#: awx/main/tasks.py:1049 msgid "Invalid virtual environment selected: {}" msgstr "选择了无效的虚拟环境:{}" @@ -4811,53 +4811,53 @@ msgstr "工作流作业节点没有错误处理路径 []。工作流作业节点 msgid "Unable to convert \"%s\" to boolean" msgstr "无法将 \"%s\" 转换为布尔值" -#: awx/main/utils/common.py:275 +#: awx/main/utils/common.py:261 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "不受支持的 SCM 类型 \"%s\"" -#: awx/main/utils/common.py:282 awx/main/utils/common.py:294 -#: awx/main/utils/common.py:313 +#: awx/main/utils/common.py:268 awx/main/utils/common.py:280 +#: awx/main/utils/common.py:299 #, python-format msgid "Invalid %s URL" msgstr "无效的 %s URL" -#: awx/main/utils/common.py:284 awx/main/utils/common.py:323 +#: awx/main/utils/common.py:270 awx/main/utils/common.py:309 #, python-format msgid "Unsupported %s URL" msgstr "不受支持的 %s URL" -#: awx/main/utils/common.py:325 +#: awx/main/utils/common.py:311 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "用于 file:// URL的主机 \"%s\" 不受支持" -#: awx/main/utils/common.py:327 +#: awx/main/utils/common.py:313 #, python-format msgid "Host is required for %s URL" msgstr "%s URL 需要主机" -#: awx/main/utils/common.py:345 +#: awx/main/utils/common.py:331 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "用户名必须是 \"git\" 以供 SSH 访问 %s。" -#: awx/main/utils/common.py:351 +#: awx/main/utils/common.py:337 #, python-format msgid "Username must be \"hg\" for SSH access to %s." 
msgstr "用户名必须是 \"hg\" 以供 SSH 访问 %s。" -#: awx/main/utils/common.py:682 +#: awx/main/utils/common.py:668 #, python-brace-format msgid "Input type `{data_type}` is not a dictionary" msgstr "输入类型 `{data_type}` 不是字典" -#: awx/main/utils/common.py:715 +#: awx/main/utils/common.py:701 #, python-brace-format msgid "Variables not compatible with JSON standard (error: {json_error})" msgstr "与 JSON 标准不兼容的变量(错误:{json_error})" -#: awx/main/utils/common.py:721 +#: awx/main/utils/common.py:707 #, python-brace-format msgid "" "Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." diff --git a/awx/ui/po/fr.po b/awx/ui/po/fr.po index 71d684fb90..59ed681256 100644 --- a/awx/ui/po/fr.po +++ b/awx/ui/po/fr.po @@ -4429,11 +4429,12 @@ msgstr "Remplacer les variables qui se trouvent dans azure_rm.ini et qui sont ut #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans cloudforms.ini et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir cloudforms.ini dans Ansible Collections github repo.\n" +" Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables" @@ -4442,20 +4443,20 @@ msgstr "Remplacer les variables qui se trouvent dans ec2.ini et qui sont utilis #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans foreman.ini et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view foreman.ini in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. 
Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir foreman.ini dans Ansible le référentiel github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "Remplacez les variables qui se trouvent dans openstack.yml et qui sont utilisées par le script de mise à jour de l'inventaire. Voici un exemple de configuration de variable\n" -" \n" -" view openstack.yml in the Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." +" \n" +" voir openstack.yml dans le référentiel Ansible github repo. Entrez les variables d’inventaire avec la syntaxe JSON ou YAML. Utilisez le bouton radio pour basculer entre les deux. Consultez la documentation d’Ansible Tower pour avoir un exemple de syntaxe." #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" diff --git a/awx/ui/po/ja.po b/awx/ui/po/ja.po index 7e44693fb0..e292156170 100644 --- a/awx/ui/po/ja.po +++ b/awx/ui/po/ja.po @@ -4427,11 +4427,11 @@ msgstr "azure_rm.ini にあり、インベントリー更新スクリプトで #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." msgstr "cloudforms.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで cloudforms.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" は Ansible Collections github リポジトリーで cloudforms.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. 
For a detailed description of these variables" @@ -4440,20 +4440,16 @@ msgstr "ec2.ini にあり、インベントリー更新スクリプトで使用 #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "foreman.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで foreman.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "foreman.ini にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定 は Ansible Collections github リポジトリーで foreman.ini を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "openstack.yml にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定\n" -" \n" -" は Ansible github リポジトリーで openstack.yml を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用してこの 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "openstack.yml にあり、インベントリー更新スクリプトで使用される変数を上書きします。たとえば、変数の設定 は Openstack github リポジトリーで openstack.yml を表示します。 JSON または YAML 構文のいずれかを使用してインベントリー変数を入力します。ラジオボタンを使用して 2 つの間の切り替えを行います。構文のサンプルについては、Ansible Tower ドキュメントを参照してください。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" diff --git a/awx/ui/po/zh.po b/awx/ui/po/zh.po index 23e2bb39c6..92348c9c6e 100644 --- a/awx/ui/po/zh.po +++ b/awx/ui/po/zh.po @@ -4424,16 +4424,16 @@ msgstr "其他(云提供商)" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:316 msgid "Override variables found in azure_rm.ini and used by the inventory update script. For a detailed description of these variables" -msgstr "覆写 azure_rm.ini 中由清单更新脚本使用的变量。有关这些变量的详细描述" +msgstr "覆盖 azure_rm.ini 中由清单更新脚本使用的变量。有关这些变量的详细描述" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:282 msgid "" "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" +" \n" +" view cloudforms.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. 
Refer to the Ansible Tower documentation for example syntax." +msgstr "覆盖 cloudforms.ini 中由清单更新脚本使用的变量。一个变量配置示例包括在\n" " \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in cloudforms.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view cloudforms.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" Ansible github repo 的 cloudforms.ini 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按钮可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:217 msgid "Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables" @@ -4442,20 +4442,19 @@ msgstr "覆写 ec2.ini 中由清单更新脚本使用的变量。有关这些变 #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:299 msgid "" "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" +" \n" +" view foreman.ini in the Ansible Collections github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +msgstr "覆盖 foreman.ini 中由清单脚本使用的变量。一个变量配置示例包括在\n" " \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration\n" -" \n" -" view foreman.ini in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" Ansible github repo 的 foreman.ini 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按旧可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:265 msgid "" "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." -msgstr "Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration\n" -" \n" -" view openstack.yml in the Ansible github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." +" \n" +" view openstack.yml in the Openstack github repo. Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax." 
+msgstr "覆盖 openstack.yml 中由清单脚本使用的变量。一个变量配置示例包括在 \n" +" Openstack github repo 的 openstack.yml 中。 使用 JSON 或 YAML 格式输入清单变量。通过单选按旧可以切换这两个格式。详情请参阅 Ansible Tower 的相关文档。" #: client/src/inventories-hosts/inventories/related/sources/sources.form.js:241 msgid "Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables" From 3dfc9328a9b9599ef73f7990ab90aad730ad2aaf Mon Sep 17 00:00:00 2001 From: mabashian Date: Thu, 4 Jun 2020 14:29:28 -0400 Subject: [PATCH 148/494] Dynamically render credential subform fields based on options responses for each credential type --- .../FieldWithPrompt/FieldWithPrompt.jsx | 7 +- .../CredentialAdd/CredentialAdd.jsx | 51 +- .../CredentialAdd/CredentialAdd.test.jsx | 32 +- .../CredentialEdit/CredentialEdit.jsx | 71 +- .../CredentialEdit/CredentialEdit.test.jsx | 32 +- .../Credential/shared/CredentialForm.jsx | 190 ++- .../Credential/shared/CredentialForm.test.jsx | 44 +- .../BecomeMethodField.jsx | 80 ++ .../CredentialFormFields/CredentialField.jsx | 167 +++ .../CredentialPluginField.jsx | 155 +++ .../CredentialPluginField.test.jsx | 20 +- .../CredentialPluginPrompt.jsx | 0 .../CredentialPluginPrompt.test.jsx | 14 +- .../CredentialsStep.jsx | 14 +- .../CredentialPluginPrompt/MetadataStep.jsx | 16 +- .../CredentialPluginPrompt/index.js | 0 .../CredentialPluginSelected.jsx | 4 +- .../CredentialPluginSelected.test.jsx | 4 +- .../CredentialPlugins/index.js | 0 .../GceFileUploadField.jsx | 89 ++ .../shared/CredentialFormFields/index.js | 3 + .../CredentialPluginField.jsx | 103 -- .../Credential/shared/CredentialSubForm.jsx | 74 + .../GoogleComputeEngineSubForm.jsx | 136 -- .../CredentialSubForms/ManualSubForm.jsx | 89 -- .../CredentialSubForms/SharedFields.jsx | 50 - .../SourceControlSubForm.jsx | 25 - .../shared/CredentialSubForms/index.js | 3 - .../shared/data.credentialTypes.json | 1200 ++++++++++++++++- awx/ui_next/src/types.js | 13 + 30 files changed, 2022 insertions(+), 664 deletions(-) create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialFormFields/BecomeMethodField.jsx create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialFormFields/CredentialField.jsx create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialFormFields/CredentialPlugins/CredentialPluginField.jsx rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginField.test.jsx (85%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.jsx (100%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.test.jsx (94%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginPrompt/CredentialsStep.jsx (83%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginPrompt/MetadataStep.jsx (89%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginPrompt/index.js (100%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginSelected.jsx (93%) rename awx/ui_next/src/screens/Credential/shared/{ => CredentialFormFields}/CredentialPlugins/CredentialPluginSelected.test.jsx (87%) rename awx/ui_next/src/screens/Credential/shared/{ => 
CredentialFormFields}/CredentialPlugins/index.js (100%) create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialFormFields/GceFileUploadField.jsx create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialFormFields/index.js delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialPlugins/CredentialPluginField.jsx create mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForm.jsx delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForms/GoogleComputeEngineSubForm.jsx delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForms/ManualSubForm.jsx delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForms/SharedFields.jsx delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForms/SourceControlSubForm.jsx delete mode 100644 awx/ui_next/src/screens/Credential/shared/CredentialSubForms/index.js diff --git a/awx/ui_next/src/components/FieldWithPrompt/FieldWithPrompt.jsx b/awx/ui_next/src/components/FieldWithPrompt/FieldWithPrompt.jsx index 19ad76c796..b0d27ccc6e 100644 --- a/awx/ui_next/src/components/FieldWithPrompt/FieldWithPrompt.jsx +++ b/awx/ui_next/src/components/FieldWithPrompt/FieldWithPrompt.jsx @@ -7,16 +7,11 @@ import { CheckboxField, FieldTooltip } from '../FormField'; const FieldHeader = styled.div` display: flex; - justify-content: space-between; - padding-bottom: var(--pf-c-form__label--PaddingBottom); - - label { - --pf-c-form__label--PaddingBottom: 0px; - } `; const StyledCheckboxField = styled(CheckboxField)` --pf-c-check__label--FontSize: var(--pf-c-form__label--FontSize); + margin-left: auto; `; function FieldWithPrompt({ diff --git a/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.jsx b/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.jsx index c721b56789..da8cffdca0 100644 --- a/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.jsx +++ b/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.jsx @@ -25,17 +25,32 @@ function CredentialAdd({ me }) { result: credentialId, } = useRequest( useCallback( - async values => { - const { inputs, organization, ...remainingValues } = values; + async (values, credentialTypesMap) => { + const { + inputs: { fields: possibleFields }, + } = credentialTypesMap[values.credential_type]; + + const { + inputs, + organization, + passwordPrompts, + ...remainingValues + } = values; + const nonPluginInputs = {}; const pluginInputs = {}; - Object.entries(inputs).forEach(([key, value]) => { - if (value.credential && value.inputs) { - pluginInputs[key] = value; + + possibleFields.forEach(field => { + const input = inputs[field.id]; + if (input.credential && input.inputs) { + pluginInputs[field.id] = input; + } else if (passwordPrompts[field.id]) { + nonPluginInputs[field.id] = 'ASK'; } else { - nonPluginInputs[key] = value; + nonPluginInputs[field.id] = input; } }); + const { data: { id: newCredentialId }, } = await CredentialsAPI.create({ @@ -44,18 +59,17 @@ function CredentialAdd({ me }) { inputs: nonPluginInputs, ...remainingValues, }); - const inputSourceRequests = []; - Object.entries(pluginInputs).forEach(([key, value]) => { - inputSourceRequests.push( + + await Promise.all( + Object.entries(pluginInputs).map(([key, value]) => CredentialInputSourcesAPI.create({ input_field_name: key, metadata: value.inputs, source_credential: value.credential.id, target_credential: newCredentialId, }) - ); - }); - await Promise.all(inputSourceRequests); + ) + ); return 
newCredentialId; }, @@ -74,10 +88,13 @@ function CredentialAdd({ me }) { try { const { data: { results: loadedCredentialTypes }, - } = await CredentialTypesAPI.read({ - or__namespace: ['gce', 'scm', 'ssh'], - }); - setCredentialTypes(loadedCredentialTypes); + } = await CredentialTypesAPI.read(); + setCredentialTypes( + loadedCredentialTypes.reduce((credentialTypesMap, credentialType) => { + credentialTypesMap[credentialType.id] = credentialType; + return credentialTypesMap; + }, {}) + ); } catch (err) { setError(err); } finally { @@ -92,7 +109,7 @@ function CredentialAdd({ me }) { }; const handleSubmit = async values => { - await submitRequest(values); + await submitRequest(values, credentialTypes); }; if (error) { diff --git a/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.test.jsx b/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.test.jsx index c348c6e252..3f6e562434 100644 --- a/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.test.jsx +++ b/awx/ui_next/src/screens/Credential/CredentialAdd/CredentialAdd.test.jsx @@ -5,7 +5,6 @@ import { mountWithContexts, waitForElement, } from '../../../../testUtils/enzymeHelpers'; -import { sleep } from '../../../../testUtils/testUtils'; import { CredentialsAPI, CredentialTypesAPI } from '../../../api'; import CredentialAdd from './CredentialAdd'; @@ -175,23 +174,34 @@ describe('', () => { }); test('handleSubmit should call the api and redirect to details page', async () => { await waitForElement(wrapper, 'isLoading', el => el.length === 0); - - wrapper.find('CredentialForm').prop('onSubmit')({ - user: 1, - organization: null, - name: 'foo', - description: 'bar', - credential_type: '2', - inputs: {}, + await act(async () => { + wrapper.find('CredentialForm').prop('onSubmit')({ + user: 1, + organization: null, + name: 'foo', + description: 'bar', + credential_type: '2', + inputs: { + username: '', + password: '', + ssh_key_data: '', + ssh_key_unlock: '', + }, + passwordPrompts: {}, + }); }); - await sleep(1); expect(CredentialsAPI.create).toHaveBeenCalledWith({ user: 1, organization: null, name: 'foo', description: 'bar', credential_type: '2', - inputs: {}, + inputs: { + username: '', + password: '', + ssh_key_data: '', + ssh_key_unlock: '', + }, }); expect(history.location.pathname).toBe('/credentials/13/details'); }); diff --git a/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.jsx b/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.jsx index aef18ea8e9..766f7146d9 100644 --- a/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.jsx +++ b/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.jsx @@ -1,7 +1,6 @@ import React, { useCallback, useState, useEffect } from 'react'; import { useHistory } from 'react-router-dom'; import { object } from 'prop-types'; - import { CardBody } from '../../../components/Card'; import { CredentialsAPI, @@ -22,8 +21,33 @@ function CredentialEdit({ credential, me }) { const { error: submitError, request: submitRequest, result } = useRequest( useCallback( - async (values, inputSourceMap) => { - const createAndUpdateInputSources = pluginInputs => + async (values, credentialTypesMap, inputSourceMap) => { + const { + inputs: { fields: possibleFields }, + } = credentialTypesMap[values.credential_type]; + + const { + inputs, + organization, + passwordPrompts, + ...remainingValues + } = values; + + const nonPluginInputs = {}; + const pluginInputs = {}; + + possibleFields.forEach(field => { + const input = inputs[field.id]; + 
if (input.credential && input.inputs) { + pluginInputs[field.id] = input; + } else if (passwordPrompts[field.id]) { + nonPluginInputs[field.id] = 'ASK'; + } else { + nonPluginInputs[field.id] = input; + } + }); + + const createAndUpdateInputSources = () => Object.entries(pluginInputs).map(([fieldName, fieldValue]) => { if (!inputSourceMap[fieldName]) { return CredentialInputSourcesAPI.create({ @@ -46,27 +70,15 @@ function CredentialEdit({ credential, me }) { return null; }); - const destroyInputSources = inputs => { - const destroyRequests = []; - Object.values(inputSourceMap).forEach(inputSource => { + const destroyInputSources = () => + Object.values(inputSourceMap).map(inputSource => { const { id, input_field_name } = inputSource; if (!inputs[input_field_name]?.credential) { - destroyRequests.push(CredentialInputSourcesAPI.destroy(id)); + return CredentialInputSourcesAPI.destroy(id); } + return null; }); - return destroyRequests; - }; - const { inputs, organization, ...remainingValues } = values; - const nonPluginInputs = {}; - const pluginInputs = {}; - Object.entries(inputs).forEach(([key, value]) => { - if (value.credential && value.inputs) { - pluginInputs[key] = value; - } else { - nonPluginInputs[key] = value; - } - }); const [{ data }] = await Promise.all([ CredentialsAPI.update(credential.id, { user: (me && me.id) || null, @@ -74,12 +86,14 @@ function CredentialEdit({ credential, me }) { inputs: nonPluginInputs, ...remainingValues, }), - ...destroyInputSources(inputs), + ...destroyInputSources(), ]); - await Promise.all(createAndUpdateInputSources(pluginInputs)); + + await Promise.all(createAndUpdateInputSources()); + return data; }, - [credential.id, me] + [me, credential.id] ) ); @@ -100,12 +114,15 @@ function CredentialEdit({ credential, me }) { data: { results: loadedInputSources }, }, ] = await Promise.all([ - CredentialTypesAPI.read({ - or__namespace: ['gce', 'scm', 'ssh'], - }), + CredentialTypesAPI.read(), CredentialsAPI.readInputSources(credential.id, { page_size: 200 }), ]); - setCredentialTypes(loadedCredentialTypes); + setCredentialTypes( + loadedCredentialTypes.reduce((credentialTypesMap, credentialType) => { + credentialTypesMap[credentialType.id] = credentialType; + return credentialTypesMap; + }, {}) + ); setInputSources( loadedInputSources.reduce((inputSourcesMap, inputSource) => { inputSourcesMap[inputSource.input_field_name] = inputSource; @@ -127,7 +144,7 @@ function CredentialEdit({ credential, me }) { }; const handleSubmit = async values => { - await submitRequest(values, inputSources); + await submitRequest(values, credentialTypes, inputSources); }; if (error) { diff --git a/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.test.jsx b/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.test.jsx index 3d4ce756cb..b586735035 100644 --- a/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.test.jsx +++ b/awx/ui_next/src/screens/Credential/CredentialEdit/CredentialEdit.test.jsx @@ -5,7 +5,6 @@ import { mountWithContexts, waitForElement, } from '../../../../testUtils/enzymeHelpers'; -import { sleep } from '../../../../testUtils/testUtils'; import { CredentialsAPI, CredentialTypesAPI } from '../../../api'; import CredentialEdit from './CredentialEdit'; @@ -279,23 +278,34 @@ describe('', () => { test('handleSubmit should post to the api', async () => { await waitForElement(wrapper, 'isLoading', el => el.length === 0); - - wrapper.find('CredentialForm').prop('onSubmit')({ - user: 1, - organization: null, - name: 'foo', - 
description: 'bar', - credential_type: '2', - inputs: {}, + await act(async () => { + wrapper.find('CredentialForm').prop('onSubmit')({ + user: 1, + organization: null, + name: 'foo', + description: 'bar', + credential_type: '2', + inputs: { + username: '', + password: '', + ssh_key_data: '', + ssh_key_unlock: '', + }, + passwordPrompts: {}, + }); }); - await sleep(1); expect(CredentialsAPI.update).toHaveBeenCalledWith(3, { user: 1, organization: null, name: 'foo', description: 'bar', credential_type: '2', - inputs: {}, + inputs: { + username: '', + password: '', + ssh_key_data: '', + ssh_key_unlock: '', + }, }); expect(history.location.pathname).toBe('/credentials/3/details'); }); diff --git a/awx/ui_next/src/screens/Credential/shared/CredentialForm.jsx b/awx/ui_next/src/screens/Credential/shared/CredentialForm.jsx index ddd4ccaa5f..c215b7f8b4 100644 --- a/awx/ui_next/src/screens/Credential/shared/CredentialForm.jsx +++ b/awx/ui_next/src/screens/Credential/shared/CredentialForm.jsx @@ -3,30 +3,20 @@ import { Formik, useField } from 'formik'; import { withI18n } from '@lingui/react'; import { t } from '@lingui/macro'; import { arrayOf, func, object, shape } from 'prop-types'; -import { Form, FormGroup, Title } from '@patternfly/react-core'; +import { Form, FormGroup } from '@patternfly/react-core'; import FormField, { FormSubmitError } from '../../../components/FormField'; import FormActionGroup from '../../../components/FormActionGroup/FormActionGroup'; import AnsibleSelect from '../../../components/AnsibleSelect'; import { required } from '../../../util/validators'; import OrganizationLookup from '../../../components/Lookup/OrganizationLookup'; -import { - FormColumnLayout, - SubFormLayout, -} from '../../../components/FormLayout'; -import { - GoogleComputeEngineSubForm, - ManualSubForm, - SourceControlSubForm, -} from './CredentialSubForms'; +import { FormColumnLayout } from '../../../components/FormLayout'; +import CredentialSubForm from './CredentialSubForm'; function CredentialFormFields({ i18n, credentialTypes, formik, - gceCredentialTypeId, initialValues, - scmCredentialTypeId, - sshCredentialTypeId, }) { const [orgField, orgMeta, orgHelpers] = useField('organization'); const [credTypeField, credTypeMeta, credTypeHelpers] = useField({ @@ -34,23 +24,52 @@ function CredentialFormFields({ validate: required(i18n._(t`Select a value for this field`), i18n), }); - const credentialTypeOptions = Object.keys(credentialTypes).map(key => { - return { - value: credentialTypes[key].id, - key: credentialTypes[key].kind, - label: credentialTypes[key].name, - }; - }); + const credentialTypeOptions = Object.keys(credentialTypes) + .map(key => { + return { + value: credentialTypes[key].id, + key: credentialTypes[key].id, + label: credentialTypes[key].name, + }; + }) + .sort((a, b) => (a.label > b.label ? 
1 : -1)); - const resetSubFormFields = (value, form) => { - Object.keys(form.initialValues.inputs).forEach(label => { - if (parseInt(value, 10) === form.initialValues.credential_type) { - form.setFieldValue(`inputs.${label}`, initialValues.inputs[label]); - } else { - form.setFieldValue(`inputs.${label}`, ''); + const resetSubFormFields = (newCredentialType, form) => { + credentialTypes[newCredentialType].inputs.fields.forEach( + ({ ask_at_runtime, type, id, choices, default: defaultValue }) => { + if ( + parseInt(newCredentialType, 10) === form.initialValues.credential_type + ) { + form.setFieldValue(`inputs.${id}`, initialValues.inputs[id]); + if (ask_at_runtime) { + form.setFieldValue( + `passwordPrompts.${id}`, + initialValues.passwordPrompts[id] + ); + } + } else { + switch (type) { + case 'string': + form.setFieldValue(`inputs.${id}`, defaultValue || ''); + break; + case 'boolean': + form.setFieldValue(`inputs.${id}`, defaultValue || false); + break; + default: + break; + } + + if (choices) { + form.setFieldValue(`inputs.${id}`, defaultValue); + } + + if (ask_at_runtime) { + form.setFieldValue(`passwordPrompts.${id}`, false); + } + } + form.setFieldTouched(`inputs.${id}`, false); } - form.setFieldTouched(`inputs.${label}`, false); - }); + ); }; return ( @@ -106,16 +125,9 @@ function CredentialFormFields({ /> {credTypeField.value !== undefined && credTypeField.value !== '' && ( - - {i18n._(t`Type Details`)} - { - { - [gceCredentialTypeId]: , - [sshCredentialTypeId]: , - [scmCredentialTypeId]: , - }[credTypeField.value] - } - + )} ); @@ -135,19 +147,43 @@ function CredentialForm({ description: credential.description || '', organization: credential?.summary_fields?.organization || null, credential_type: credential.credential_type || '', - inputs: { - become_method: credential?.inputs?.become_method || '', - become_password: credential?.inputs?.become_password || '', - become_username: credential?.inputs?.become_username || '', - password: credential?.inputs?.password || '', - project: credential?.inputs?.project || '', - ssh_key_data: credential?.inputs?.ssh_key_data || '', - ssh_key_unlock: credential?.inputs?.ssh_key_unlock || '', - ssh_public_key_data: credential?.inputs?.ssh_public_key_data || '', - username: credential?.inputs?.username || '', - }, + inputs: {}, + passwordPrompts: {}, }; + Object.values(credentialTypes).forEach(credentialType => { + credentialType.inputs.fields.forEach( + ({ ask_at_runtime, type, id, choices, default: defaultValue }) => { + if (credential?.inputs && credential.inputs[id]) { + if (ask_at_runtime) { + initialValues.passwordPrompts[id] = + credential.inputs[id] === 'ASK' || false; + } + initialValues.inputs[id] = credential.inputs[id]; + } else { + switch (type) { + case 'string': + initialValues.inputs[id] = defaultValue || ''; + break; + case 'boolean': + initialValues.inputs[id] = defaultValue || false; + break; + default: + break; + } + + if (choices) { + initialValues.inputs[id] = defaultValue; + } + + if (ask_at_runtime) { + initialValues.passwordPrompts[id] = false; + } + } + } + ); + }); + Object.values(inputSources).forEach(inputSource => { initialValues.inputs[inputSource.input_field_name] = { credential: inputSource.summary_fields.source_credential, @@ -155,60 +191,10 @@ function CredentialForm({ }; }); - const scmCredentialTypeId = Object.keys(credentialTypes) - .filter(key => credentialTypes[key].namespace === 'scm') - .map(key => credentialTypes[key].id)[0]; - const sshCredentialTypeId = Object.keys(credentialTypes) - .filter(key => 
credentialTypes[key].namespace === 'ssh') - .map(key => credentialTypes[key].id)[0]; - const gceCredentialTypeId = Object.keys(credentialTypes) - .filter(key => credentialTypes[key].namespace === 'gce') - .map(key => credentialTypes[key].id)[0]; - return ( { - const scmKeys = [ - 'username', - 'password', - 'ssh_key_data', - 'ssh_key_unlock', - ]; - const sshKeys = [ - 'username', - 'password', - 'ssh_key_data', - 'ssh_public_key_data', - 'ssh_key_unlock', - 'become_method', - 'become_username', - 'become_password', - ]; - const gceKeys = ['username', 'ssh_key_data', 'project']; - if (parseInt(values.credential_type, 10) === scmCredentialTypeId) { - Object.keys(values.inputs).forEach(key => { - if (scmKeys.indexOf(key) < 0) { - delete values.inputs[key]; - } - }); - } else if ( - parseInt(values.credential_type, 10) === sshCredentialTypeId - ) { - Object.keys(values.inputs).forEach(key => { - if (sshKeys.indexOf(key) < 0) { - delete values.inputs[key]; - } - }); - } else if ( - parseInt(values.credential_type, 10) === gceCredentialTypeId - ) { - Object.keys(values.inputs).forEach(key => { - if (gceKeys.indexOf(key) < 0) { - delete values.inputs[key]; - } - }); - } onSubmit(values); }} > @@ -219,9 +205,6 @@ function CredentialForm({ formik={formik} initialValues={initialValues} credentialTypes={credentialTypes} - gceCredentialTypeId={gceCredentialTypeId} - scmCredentialTypeId={scmCredentialTypeId} - sshCredentialTypeId={sshCredentialTypeId} {...rest} /> @@ -239,13 +222,16 @@ function CredentialForm({ CredentialForm.proptype = { handleSubmit: func.isRequired, handleCancel: func.isRequired, + credentialTypes: shape({}).isRequired, credential: shape({}), inputSources: arrayOf(object), + submitError: shape({}), }; CredentialForm.defaultProps = { credential: {}, inputSources: [], + submitError: null, }; export default withI18n()(CredentialForm); diff --git a/awx/ui_next/src/screens/Credential/shared/CredentialForm.test.jsx b/awx/ui_next/src/screens/Credential/shared/CredentialForm.test.jsx index c77089b758..2062ed01ee 100644 --- a/awx/ui_next/src/screens/Credential/shared/CredentialForm.test.jsx +++ b/awx/ui_next/src/screens/Credential/shared/CredentialForm.test.jsx @@ -4,11 +4,19 @@ import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; import machineCredential from './data.machineCredential.json'; import gceCredential from './data.gceCredential.json'; import scmCredential from './data.scmCredential.json'; -import credentialTypes from './data.credentialTypes.json'; +import credentialTypesArr from './data.credentialTypes.json'; import CredentialForm from './CredentialForm'; jest.mock('../../../api'); +const credentialTypes = credentialTypesArr.reduce( + (credentialTypesMap, credentialType) => { + credentialTypesMap[credentialType.id] = credentialType; + return credentialTypesMap; + }, + {} +); + describe('', () => { let wrapper; const onCancel = jest.fn(); @@ -28,23 +36,19 @@ describe('', () => { expect(wrapper.find('FormGroup[label="Organization"]').length).toBe(1); expect(wrapper.find('FormGroup[label="Credential Type"]').length).toBe(1); expect(wrapper.find('FormGroup[label="Username"]').length).toBe(1); - expect(wrapper.find('FormGroup[label="Password"]').length).toBe(1); + expect(wrapper.find('input#credential-password').length).toBe(1); expect(wrapper.find('FormGroup[label="SSH Private Key"]').length).toBe(1); expect( wrapper.find('FormGroup[label="Signed SSH Certificate"]').length ).toBe(1); + expect(wrapper.find('input#credential-ssh_key_unlock').length).toBe(1); 
expect( - wrapper.find('FormGroup[label="Private Key Passphrase"]').length - ).toBe(1); - expect( - wrapper.find('FormGroup[label="Privelege Escalation Method"]').length + wrapper.find('FormGroup[label="Privilege Escalation Method"]').length ).toBe(1); expect( wrapper.find('FormGroup[label="Privilege Escalation Username"]').length ).toBe(1); - expect( - wrapper.find('FormGroup[label="Privilege Escalation Password"]').length - ).toBe(1); + expect(wrapper.find('input#credential-become_password').length).toBe(1); }; const sourceFieldExpects = () => { @@ -55,7 +59,7 @@ describe('', () => { expect(wrapper.find('FormGroup[label="Credential Type"]').length).toBe(1); expect(wrapper.find('FormGroup[label="Username"]').length).toBe(1); expect(wrapper.find('FormGroup[label="Password"]').length).toBe(1); - expect(wrapper.find('FormGroup[label="SSH Private Key"]').length).toBe(1); + expect(wrapper.find('FormGroup[label="SCM Private Key"]').length).toBe(1); expect( wrapper.find('FormGroup[label="Private Key Passphrase"]').length ).toBe(1); @@ -71,10 +75,10 @@ describe('', () => { wrapper.find('FormGroup[label="Service account JSON file"]').length ).toBe(1); expect( - wrapper.find('FormGroup[label="Service account email address"]').length + wrapper.find('FormGroup[label="Service Account Email Address"]').length ).toBe(1); expect(wrapper.find('FormGroup[label="Project"]').length).toBe(1); - expect(wrapper.find('FormGroup[label="RSA private key"]').length).toBe(1); + expect(wrapper.find('FormGroup[label="RSA Private Key"]').length).toBe(1); }; describe('Add', () => { @@ -152,9 +156,9 @@ describe('', () => { gceFieldExpects(); expect(wrapper.find('input#credential-username').prop('value')).toBe(''); expect(wrapper.find('input#credential-project').prop('value')).toBe(''); - expect(wrapper.find('textarea#credential-sshKeyData').prop('value')).toBe( - '' - ); + expect( + wrapper.find('textarea#credential-ssh_key_data').prop('value') + ).toBe(''); await act(async () => { wrapper.find('FileUpload').invoke('onChange')({ name: 'foo.json', @@ -169,7 +173,9 @@ describe('', () => { expect(wrapper.find('input#credential-project').prop('value')).toBe( 'test123' ); - expect(wrapper.find('textarea#credential-sshKeyData').prop('value')).toBe( + expect( + wrapper.find('textarea#credential-ssh_key_data').prop('value') + ).toBe( '-----BEGIN PRIVATE KEY-----\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n-----END PRIVATE KEY-----\n' ); }); @@ -180,9 +186,9 @@ describe('', () => { wrapper.update(); expect(wrapper.find('input#credential-username').prop('value')).toBe(''); expect(wrapper.find('input#credential-project').prop('value')).toBe(''); - expect(wrapper.find('textarea#credential-sshKeyData').prop('value')).toBe( - '' - ); + expect( + wrapper.find('textarea#credential-ssh_key_data').prop('value') + ).toBe(''); }); test('should show error when error thrown parsing JSON', async () => { expect(wrapper.find('#credential-gce-file-helper').text()).toBe( diff --git a/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/BecomeMethodField.jsx b/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/BecomeMethodField.jsx new file mode 100644 index 0000000000..ee97e43d27 --- /dev/null +++ b/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/BecomeMethodField.jsx @@ -0,0 +1,80 @@ +import React, { useState } from 'react'; +import { useField } from 'formik'; +import { bool, shape, string } from 'prop-types'; +import { + FormGroup, + Select, + SelectOption, + SelectVariant, +} from 
'@patternfly/react-core'; +import { FieldTooltip } from '../../../../components/FormField'; + +function BecomeMethodField({ fieldOptions, isRequired }) { + const [isOpen, setIsOpen] = useState(false); + const [options, setOptions] = useState( + [ + 'sudo', + 'su', + 'pbrun', + 'pfexec', + 'dzdo', + 'pmrun', + 'runas', + 'enable', + 'doas', + 'ksu', + 'machinectl', + 'sesu', + ].map(val => ({ value: val })) + ); + const [becomeMethodField, meta, helpers] = useField({ + name: `inputs.${fieldOptions.id}`, + }); + return ( + + {fieldOptions.help_text && ( + + )} + + + ); +} +BecomeMethodField.propTypes = { + fieldOptions: shape({ + id: string.isRequired, + label: string.isRequired, + }).isRequired, + isRequired: bool, +}; +BecomeMethodField.defaultProps = { + isRequired: false, +}; + +export default BecomeMethodField; diff --git a/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/CredentialField.jsx b/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/CredentialField.jsx new file mode 100644 index 0000000000..da7c94729c --- /dev/null +++ b/awx/ui_next/src/screens/Credential/shared/CredentialFormFields/CredentialField.jsx @@ -0,0 +1,167 @@ +import React from 'react'; +import { useField, useFormikContext } from 'formik'; +import { shape, string } from 'prop-types'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { + FormGroup, + InputGroup, + TextArea, + TextInput, +} from '@patternfly/react-core'; +import { FieldTooltip, PasswordInput } from '../../../../components/FormField'; +import AnsibleSelect from '../../../../components/AnsibleSelect'; +import { CredentialType } from '../../../../types'; +import { required } from '../../../../util/validators'; +import { CredentialPluginField } from './CredentialPlugins'; +import BecomeMethodField from './BecomeMethodField'; + +function CredentialInput({ fieldOptions, credentialKind, ...rest }) { + const [subFormField, meta] = useField(`inputs.${fieldOptions.id}`); + const isValid = !(meta.touched && meta.error); + if (fieldOptions.multiline) { + return ( +
diff --git a/awx/ui/client/src/scheduler/schedulerForm.partial.html b/awx/ui/client/src/scheduler/schedulerForm.partial.html index 068851e533..0daad2b3b1 100644 --- a/awx/ui/client/src/scheduler/schedulerForm.partial.html +++ b/awx/ui/client/src/scheduler/schedulerForm.partial.html @@ -29,7 +29,7 @@ id="schedulerName" ng-model="schedulerName" required ng-disabled="!(schedule_obj.summary_fields.user_capabilities.edit || canAdd) || credentialRequiresPassword" - placeholder="{{strings.get('form.SCHEDULE_NAME')}}"> + placeholder="{{strings.get('form.SCHEDULE_NAME')}}" />
{{ strings.get('form.NAME_REQUIRED_MESSAGE') }} @@ -74,7 +74,7 @@ placeholder="{{strings.get('form.HH24')}}" aw-min="0" min="0" aw-max="23" max="23" data-zero-pad="2" required - ng-change="timeChange()" > + ng-change="timeChange()" /> : @@ -90,7 +90,7 @@ placeholder="{{strings.get('form.MM')}}" min="0" max="59" data-zero-pad="2" required - ng-change="timeChange()" > + ng-change="timeChange()" /> : @@ -106,7 +106,7 @@ placeholder="{{strings.get('form.SS')}}" min="0" max="59" data-zero-pad="2" required - ng-change="timeChange()" > + ng-change="timeChange()" />
@@ -173,7 +173,7 @@ min="1" max="999" ng-change="resetError('scheduler_interval_error')" - > + />
@@ -209,7 +209,7 @@ aw-spinner="$parent.monthDay" ng-model="$parent.monthDay" min="1" max="31" - ng-change="resetError('scheduler_monthDay_error')" > + ng-change="resetError('scheduler_monthDay_error')" />
{{ strings.get('form.MONTH_DAY_ERROR_MESSAGE') }} @@ -228,7 +228,7 @@ ng-model="$parent.monthlyRepeatOption" ng-change="monthlyRepeatChange()" name="monthlyRepeatOption" - id="monthlyRepeatOption"> + id="monthlyRepeatOption" /> {{ strings.get('form.ON_THE') }}
@@ -267,7 +267,7 @@ ng-model="$parent.yearlyRepeatOption" ng-change="yearlyRepeatChange()" name="yearlyRepeatOption" - id="yearlyRepeatOption"> + id="yearlyRepeatOption" /> {{ strings.get('form.ON') }}
@@ -292,7 +292,7 @@ ng-model="$parent.yearlyMonthDay" min="1" max="31" ng-change="resetError('scheduler_yearlyMonthDay_error')" - > + />
@@ -312,7 +312,7 @@ ng-model="$parent.yearlyRepeatOption" ng-change="yearlyRepeatChange()" name="yearlyRepeatOption" - id="yearlyRepeatOption"> + id="yearlyRepeatOption" /> {{ strings.get('form.ON_THE') }}
@@ -524,7 +524,7 @@ placeholder="{{strings.get('form.HH24')}}" aw-min="0" min="0" aw-max="23" max="23" data-zero-pad="2" required - ng-change="schedulerEndChange('schedulerEndHour', $parent.schedulerEndHour)" > + ng-change="schedulerEndChange('schedulerEndHour', $parent.schedulerEndHour)" /> : @@ -540,7 +540,7 @@ placeholder="{{strings.get('form.MM')}}" min="0" max="59" data-zero-pad="2" required - ng-change="schedulerEndChange('schedulerEndMinute', $parent.schedulerEndMinute)" > + ng-change="schedulerEndChange('schedulerEndMinute', $parent.schedulerEndMinute)" /> : @@ -556,7 +556,7 @@ placeholder="{{strings.get('form.SS')}}" min="0" max="59" data-zero-pad="2" required - ng-change="schedulerEndChange('schedulerEndSecond', $parent.schedulerEndSecond)" > + ng-change="schedulerEndChange('schedulerEndSecond', $parent.schedulerEndSecond)" />
@@ -604,7 +604,7 @@ class="SchedulerFormDetail-radioButton" ng-model="dateChoice" id="date-choice-local" - value="local" > + value="local" /> {{ strings.get('form.LOCAL_TIME_ZONE') }}
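Every hunk in the scheduler template patch above makes the same mechanical change: bare void elements such as <input ...> are explicitly self-closed as <input ... />. A minimal before/after sketch of the pattern, using a hypothetical "exampleMinute" field rather than one copied from the template (ng-change="timeChange()" mirrors the handler used in the real hunks):

    <!-- before: void element left unclosed -->
    <input type="number" id="exampleMinute" ng-model="exampleMinute"
           min="0" max="59" data-zero-pad="2" required
           ng-change="timeChange()" >

    <!-- after: explicitly self-closed -->
    <input type="number" id="exampleMinute" ng-model="exampleMinute"
           min="0" max="59" data-zero-pad="2" required
           ng-change="timeChange()" />
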
From a88f03b372c478529b1debc53565f7cd244c4113 Mon Sep 17 00:00:00 2001 From: Bill Nottingham Date: Mon, 6 Jul 2020 13:48:58 -0400 Subject: [PATCH 294/494] Reintroduce label filtering Labels are visible if you have a role on the org they are in, or on a job template they're attached to. --- awx/main/access.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/awx/main/access.py b/awx/main/access.py index 4705fb2cfc..d0f3bd6c96 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -2480,13 +2480,16 @@ class NotificationAccess(BaseAccess): class LabelAccess(BaseAccess): ''' - I can see/use a Label if I have permission to associated organization + I can see/use a Label if I have permission to associated organization, or to a JT that the label is on ''' model = Label prefetch_related = ('modified_by', 'created_by', 'organization',) def filtered_queryset(self): - return self.model.objects.all() + return self.model.objects.filter( + Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | + Q(unifiedjobtemplate_labels__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')) + ) @check_superuser def can_add(self, data): From 2f1b4d81e1758e76d4240fe39bfe38f418c316fd Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Mon, 6 Jul 2020 13:50:33 -0400 Subject: [PATCH 295/494] use jinja2.sandbox for credential type injectors --- awx/main/models/credential/__init__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 1701c1fb24..b16896be1f 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -11,7 +11,7 @@ import tempfile from types import SimpleNamespace # Jinja2 -from jinja2 import Template +from jinja2 import sandbox # Django from django.db import models @@ -514,8 +514,11 @@ class CredentialType(CommonModelNameNotUnique): # If any file templates are provided, render the files and update the # special `tower` template namespace so the filename can be # referenced in other injectors + + sandbox_env = sandbox.ImmutableSandboxedEnvironment() + for file_label, file_tmpl in file_tmpls.items(): - data = Template(file_tmpl).render(**namespace) + data = sandbox_env.from_string(file_tmpl).render(**namespace) _, path = tempfile.mkstemp(dir=private_data_dir) with open(path, 'w') as f: f.write(data) @@ -537,14 +540,14 @@ class CredentialType(CommonModelNameNotUnique): except ValidationError as e: logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e)) continue - env[env_var] = Template(tmpl).render(**namespace) - safe_env[env_var] = Template(tmpl).render(**safe_namespace) + env[env_var] = sandbox_env.from_string(tmpl).render(**namespace) + safe_env[env_var] = sandbox_env.from_string(tmpl).render(**safe_namespace) if 'INVENTORY_UPDATE_ID' not in env: # awx-manage inventory_update does not support extra_vars via -e extra_vars = {} for var_name, tmpl in self.injectors.get('extra_vars', {}).items(): - extra_vars[var_name] = Template(tmpl).render(**namespace) + extra_vars[var_name] = sandbox_env.from_string(tmpl).render(**namespace) def build_extra_vars_file(vars, private_dir): handle, path = tempfile.mkstemp(dir = private_dir) From ea4b435ea7513745f41595ff59af18f7adc7abf3 Mon Sep 17 00:00:00 2001 From: Jake McDermott Date: Wed, 1 Jul 2020 11:59:05 -0400 Subject: [PATCH 296/494] Ignore required field validation for booleans --- .../client/lib/components/input/base.controller.js | 12 
+++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/awx/ui/client/lib/components/input/base.controller.js b/awx/ui/client/lib/components/input/base.controller.js index eabd536e34..9a08ed0f2e 100644 --- a/awx/ui/client/lib/components/input/base.controller.js +++ b/awx/ui/client/lib/components/input/base.controller.js @@ -12,7 +12,13 @@ function BaseInputController (strings) { scope.state._touched = false; scope.state._required = scope.state.required || false; - scope.state._isValid = scope.state._isValid || false; + + if (scope.state.type === 'boolean') { + scope.state._isValid = scope.state._isValid || true; + } else { + scope.state._isValid = scope.state._isValid || false; + } + scope.state._disabled = scope.state._disabled || false; scope.state._activeModel = scope.state._activeModel || '_value'; @@ -59,6 +65,10 @@ function BaseInputController (strings) { scope.state._touched = true; } + if (scope.state.type === 'boolean') { + return { isValid, message }; + } + if (scope.state._required && (!scope.state._value || !scope.state._value[0]) && !scope.state._displayValue) { isValid = false; From 05799d97952e79d1659de68289dcd8f06ec9c0c9 Mon Sep 17 00:00:00 2001 From: Jake McDermott Date: Wed, 1 Jul 2020 13:41:40 -0400 Subject: [PATCH 297/494] Avoid non-unique field name collisions Custom credentials can have input fields named 'name', 'organization', 'description', etc. Underscore these variables to make collisions less likely to occur. --- .../add-edit-credentials.controller.js | 27 ++++++++++--------- .../add-edit-credentials.view.html | 6 ++--- awx/ui/client/lib/models/Credential.js | 10 +++++++ 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/awx/ui/client/features/credentials/add-edit-credentials.controller.js b/awx/ui/client/features/credentials/add-edit-credentials.controller.js index f82fd6e3df..57c8eed1dd 100644 --- a/awx/ui/client/features/credentials/add-edit-credentials.controller.js +++ b/awx/ui/client/features/credentials/add-edit-credentials.controller.js @@ -53,16 +53,19 @@ function AddEditCredentialsController ( vm.form.disabled = !isEditable; } - vm.form.organization._disabled = !isOrgEditableByUser; + vm.form._organization._disabled = !isOrgEditableByUser; // Only exists for permissions compatibility $scope.credential_obj = credential.get(); - vm.form.organization._resource = 'organization'; - vm.form.organization._model = organization; - vm.form.organization._route = 'credentials.edit.organization'; - vm.form.organization._value = credential.get('summary_fields.organization.id'); - vm.form.organization._displayValue = credential.get('summary_fields.organization.name'); - vm.form.organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); + // Custom credentials can have input fields named 'name', 'organization', + // 'description', etc. Underscore these variables to make collisions + // less likely to occur. 
+ vm.form._organization._resource = 'organization'; + vm.form._organization._model = organization; + vm.form._organization._route = 'credentials.edit.organization'; + vm.form._organization._value = credential.get('summary_fields.organization.id'); + vm.form._organization._displayValue = credential.get('summary_fields.organization.name'); + vm.form._organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); vm.form.credential_type._resource = 'credential_type'; vm.form.credential_type._model = credentialType; @@ -98,10 +101,10 @@ function AddEditCredentialsController ( vm.form._formName = 'credential'; vm.form.disabled = !credential.isCreatable(); - vm.form.organization._resource = 'organization'; - vm.form.organization._route = 'credentials.add.organization'; - vm.form.organization._model = organization; - vm.form.organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); + vm.form._organization._resource = 'organization'; + vm.form._organization._route = 'credentials.add.organization'; + vm.form._organization._model = organization; + vm.form._organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); vm.form.credential_type._resource = 'credential_type'; vm.form.credential_type._route = 'credentials.add.credentialType'; @@ -112,7 +115,7 @@ function AddEditCredentialsController ( $scope.$watch('organization', () => { if ($scope.organization) { - vm.form.organization._idFromModal = $scope.organization; + vm.form._organization._idFromModal = $scope.organization; } }); diff --git a/awx/ui/client/features/credentials/add-edit-credentials.view.html b/awx/ui/client/features/credentials/add-edit-credentials.view.html index 7da2b0e8e1..aa3e581b6d 100644 --- a/awx/ui/client/features/credentials/add-edit-credentials.view.html +++ b/awx/ui/client/features/credentials/add-edit-credentials.view.html @@ -10,9 +10,9 @@ - - - + + + diff --git a/awx/ui/client/lib/models/Credential.js b/awx/ui/client/lib/models/Credential.js index 27b6a04533..6fbce5e141 100644 --- a/awx/ui/client/lib/models/Credential.js +++ b/awx/ui/client/lib/models/Credential.js @@ -27,6 +27,16 @@ function createFormSchema (method, config) { } }); + // Custom credentials can have input fields named 'name', 'organization', + // 'description', etc. Underscore these variables to make collisions + // less likely to occur. 
+ schema._name = schema.name; + schema._organization = schema.organization; + schema._description = schema.description; + delete schema.name; + delete schema.organization; + delete schema.description; + return schema; } From d14aee70a1ebafc5076f93eac148e2bed41f6484 Mon Sep 17 00:00:00 2001 From: Graham Mainwaring Date: Mon, 6 Jul 2020 15:26:39 -0400 Subject: [PATCH 298/494] Don't follow redirects in credential plugins --- awx/main/credential_plugins/aim.py | 1 + awx/main/credential_plugins/conjur.py | 4 +++- awx/main/credential_plugins/hashivault.py | 10 ++++++++-- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index c75d4d85aa..c63181ed46 100644 --- a/awx/main/credential_plugins/aim.py +++ b/awx/main/credential_plugins/aim.py @@ -95,6 +95,7 @@ def aim_backend(**kwargs): timeout=30, cert=cert, verify=verify, + allow_redirects=False, ) res.raise_for_status() return res.json()['Content'] diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py index 55fd2e60f2..a851277134 100644 --- a/awx/main/credential_plugins/conjur.py +++ b/awx/main/credential_plugins/conjur.py @@ -63,7 +63,8 @@ def conjur_backend(**kwargs): auth_kwargs = { 'headers': {'Content-Type': 'text/plain'}, - 'data': api_key + 'data': api_key, + 'allow_redirects': False, } if cacert: auth_kwargs['verify'] = create_temporary_fifo(cacert.encode()) @@ -78,6 +79,7 @@ def conjur_backend(**kwargs): lookup_kwargs = { 'headers': {'Authorization': 'Token token="{}"'.format(token)}, + 'allow_redirects': False, } if cacert: lookup_kwargs['verify'] = create_temporary_fifo(cacert.encode()) diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py index c9caafba6b..c094f747d9 100644 --- a/awx/main/credential_plugins/hashivault.py +++ b/awx/main/credential_plugins/hashivault.py @@ -97,7 +97,10 @@ def kv_backend(**kwargs): cacert = kwargs.get('cacert', None) api_version = kwargs['api_version'] - request_kwargs = {'timeout': 30} + request_kwargs = { + 'timeout': 30, + 'allow_redirects': False, + } if cacert: request_kwargs['verify'] = create_temporary_fifo(cacert.encode()) @@ -150,7 +153,10 @@ def ssh_backend(**kwargs): role = kwargs['role'] cacert = kwargs.get('cacert', None) - request_kwargs = {'timeout': 30} + request_kwargs = { + 'timeout': 30, + 'allow_redirects': False, + } if cacert: request_kwargs['verify'] = create_temporary_fifo(cacert.encode()) From b7f37d5e26e16f682c4d1486a54f22a03aff74b7 Mon Sep 17 00:00:00 2001 From: Graham Mainwaring Date: Mon, 6 Jul 2020 21:17:21 -0400 Subject: [PATCH 299/494] Reduce error detail in webhook notification --- awx/main/notifications/grafana_backend.py | 4 ++-- awx/main/notifications/hipchat_backend.py | 4 ++-- awx/main/notifications/mattermost_backend.py | 4 ++-- awx/main/notifications/rocketchat_backend.py | 4 ++-- awx/main/notifications/webhook_backend.py | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/awx/main/notifications/grafana_backend.py b/awx/main/notifications/grafana_backend.py index 23816c8d06..01699b24dc 100644 --- a/awx/main/notifications/grafana_backend.py +++ b/awx/main/notifications/grafana_backend.py @@ -99,8 +99,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): headers=grafana_headers, verify=(not self.grafana_no_verify_ssl)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification grafana: {}").format(r.text))) + logger.error(smart_text(_("Error 
sending notification grafana: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.text))) + raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py index 16790644a3..1dfde08546 100644 --- a/awx/main/notifications/hipchat_backend.py +++ b/awx/main/notifications/hipchat_backend.py @@ -47,8 +47,8 @@ class HipChatBackend(AWXBaseEmailBackend, CustomNotificationBase): "from": m.from_email, "message_format": "text"}) if r.status_code != 204: - logger.error(smart_text(_("Error sending messages: {}").format(r.text))) + logger.error(smart_text(_("Error sending messages: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending message to hipchat: {}").format(r.text))) + raise Exception(smart_text(_("Error sending message to hipchat: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index 78a23c72d1..59a1c6f5e1 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -46,8 +46,8 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase): r = requests.post("{}".format(m.recipients()[0]), json=payload, verify=(not self.mattermost_no_verify_ssl)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.text))) + logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification mattermost: {}").format(r.text))) + raise Exception(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py index 1ad367fb57..df271bf80d 100644 --- a/awx/main/notifications/rocketchat_backend.py +++ b/awx/main/notifications/rocketchat_backend.py @@ -46,9 +46,9 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase): if r.status_code >= 400: logger.error(smart_text( - _("Error sending notification rocket.chat: {}").format(r.text))) + _("Error sending notification rocket.chat: {}").format(r.status_code))) if not self.fail_silently: raise Exception(smart_text( - _("Error sending notification rocket.chat: {}").format(r.text))) + _("Error sending notification rocket.chat: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index b9c2c35d22..a33cf026f8 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -72,8 +72,8 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): headers=self.headers, verify=(not self.disable_ssl_verification)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification webhook: {}").format(r.text))) + logger.error(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.text))) + raise Exception(smart_text(_("Error sending notification webhook: 
{}").format(r.status_code))) sent_messages += 1 return sent_messages From 5a96af79d43df982e02f3fa25120fb588f9f3bcb Mon Sep 17 00:00:00 2001 From: Graham Mainwaring Date: Mon, 6 Jul 2020 21:41:28 -0400 Subject: [PATCH 300/494] Reduce error detail in credential lookups --- awx/api/views/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index aca8d892a0..a0a39fa1e1 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -1397,7 +1397,7 @@ class CredentialExternalTest(SubDetailAPIView): obj.credential_type.plugin.backend(**backend_kwargs) return Response({}, status=status.HTTP_202_ACCEPTED) except requests.exceptions.HTTPError as exc: - message = 'HTTP {}\n{}'.format(exc.response.status_code, exc.response.text) + message = 'HTTP {}'.format(exc.response.status_code) return Response({'inputs': message}, status=status.HTTP_400_BAD_REQUEST) except Exception as exc: return Response({'inputs': str(exc)}, status=status.HTTP_400_BAD_REQUEST) From 8b4b1f5f816de6282ab221399f529668824e3e84 Mon Sep 17 00:00:00 2001 From: Ilkka Tengvall Date: Mon, 6 Jul 2020 11:12:04 +0300 Subject: [PATCH 301/494] Allow anyone to start awx and awx_task [#7545] --- installer/roles/image_build/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/installer/roles/image_build/tasks/main.yml b/installer/roles/image_build/tasks/main.yml index e0b8bd3fe1..9679161a86 100644 --- a/installer/roles/image_build/tasks/main.yml +++ b/installer/roles/image_build/tasks/main.yml @@ -110,14 +110,14 @@ copy: src: launch_awx.sh dest: "{{ docker_base_path }}/launch_awx.sh" - mode: '0700' + mode: '0755' delegate_to: localhost - name: Stage launch_awx_task template: src: launch_awx_task.sh.j2 dest: "{{ docker_base_path }}/launch_awx_task.sh" - mode: '0700' + mode: '0755' delegate_to: localhost - name: Stage rsyslog.conf From 61d3a765eea93964d2992acb656c18309213e1ca Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 7 Jul 2020 10:59:14 -0400 Subject: [PATCH 302/494] prevent unsafe jinja from being saved in the first place for cred types see: https://github.com/ansible/tower-security/issues/21 --- awx/main/fields.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/awx/main/fields.py b/awx/main/fields.py index 4e854bbb8b..57d2d9e505 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -7,8 +7,8 @@ import json import re import urllib.parse -from jinja2 import Environment, StrictUndefined -from jinja2.exceptions import UndefinedError, TemplateSyntaxError +from jinja2 import sandbox, StrictUndefined +from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError # Django from django.contrib.postgres.fields import JSONField as upstream_JSONBField @@ -932,7 +932,7 @@ class CredentialTypeInjectorField(JSONSchemaField): self.validate_env_var_allowed(key) for key, tmpl in injector.items(): try: - Environment( + sandbox.ImmutableSandboxedEnvironment( undefined=StrictUndefined ).from_string(tmpl).render(valid_namespace) except UndefinedError as e: @@ -942,6 +942,10 @@ class CredentialTypeInjectorField(JSONSchemaField): code='invalid', params={'value': value}, ) + except SecurityError as e: + raise django_exceptions.ValidationError( + _('Encountered unsafe code execution: {}').format(e) + ) except TemplateSyntaxError as e: raise django_exceptions.ValidationError( _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format( From 
777d44ec349b6366c68dd846a12f24f9a6f17e1c Mon Sep 17 00:00:00 2001 From: nixocio Date: Mon, 29 Jun 2020 18:08:13 -0400 Subject: [PATCH 303/494] Add edit credential types Add feature to edit credential types. See: https://github.com/ansible/awx/issues/7326 --- .../components/FormField/FormSubmitError.jsx | 2 + .../screens/CredentialType/CredentialType.jsx | 12 +- .../CredentialTypeDetails.jsx | 2 +- .../CredentialTypeEdit/CredentialTypeEdit.jsx | 42 +++++- .../CredentialTypeEdit.test.jsx | 140 ++++++++++++++++++ .../CredentialTypeList/CredentialTypeList.jsx | 2 +- .../shared/CredentialTypeForm.jsx | 12 +- awx/ui_next/src/util/yaml.js | 11 +- 8 files changed, 200 insertions(+), 23 deletions(-) create mode 100644 awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.test.jsx diff --git a/awx/ui_next/src/components/FormField/FormSubmitError.jsx b/awx/ui_next/src/components/FormField/FormSubmitError.jsx index 7b6edc6cb3..714ca4119d 100644 --- a/awx/ui_next/src/components/FormField/FormSubmitError.jsx +++ b/awx/ui_next/src/components/FormField/FormSubmitError.jsx @@ -22,6 +22,8 @@ function FormSubmitError({ error }) { Object.values(error.response.data).forEach(value => { if (Array.isArray(value)) { messages = messages.concat(value); + } else { + messages.push(value); } }); setErrorMessage(messages.length > 0 ? messages : null); diff --git a/awx/ui_next/src/screens/CredentialType/CredentialType.jsx b/awx/ui_next/src/screens/CredentialType/CredentialType.jsx index 121020b569..17dc115a44 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialType.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialType.jsx @@ -65,11 +65,6 @@ function CredentialType({ i18n, setBreadcrumb }) { }, ]; - let cardHeader = ; - if (pathname.endsWith('edit')) { - cardHeader = null; - } - if (!isLoading && contentError) { return ( @@ -89,6 +84,11 @@ function CredentialType({ i18n, setBreadcrumb }) { ); } + let cardHeader = ; + if (pathname.endsWith('edit')) { + cardHeader = null; + } + return ( @@ -104,7 +104,7 @@ function CredentialType({ i18n, setBreadcrumb }) { {credentialType && ( <> - + diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx index d622c9d6f1..1b99c16867 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.jsx @@ -70,7 +70,7 @@ function CredentialTypeDetails({ credentialType, i18n }) { diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx index 9ccbf329d0..c188e97e97 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.jsx @@ -1,11 +1,41 @@ -import React from 'react'; -import { Card, PageSection } from '@patternfly/react-core'; +import React, { useState } from 'react'; +import { useHistory } from 'react-router-dom'; -function CredentialTypeEdit() { +import { CardBody } from '../../../components/Card'; +import { CredentialTypesAPI } from '../../../api'; +import CredentialTypeForm from '../shared/CredentialTypeForm'; +import { parseVariableField } from '../../../util/yaml'; + +function CredentialTypeEdit({ credentialType }) { + const history = useHistory(); + const 
[submitError, setSubmitError] = useState(null); + const detailsUrl = `/credential_types/${credentialType.id}/details`; + + const handleSubmit = async values => { + try { + await CredentialTypesAPI.update(credentialType.id, { + ...values, + injectors: parseVariableField(values.injectors), + inputs: parseVariableField(values.inputs), + }); + history.push(detailsUrl); + } catch (error) { + setSubmitError(error); + } + }; + + const handleCancel = () => { + history.push(detailsUrl); + }; return ( - - Credential Type Edit - + + + ); } diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.test.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.test.jsx new file mode 100644 index 0000000000..f4800e81b4 --- /dev/null +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeEdit/CredentialTypeEdit.test.jsx @@ -0,0 +1,140 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { createMemoryHistory } from 'history'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import { CredentialTypesAPI } from '../../../api'; + +import CredentialTypeEdit from './CredentialTypeEdit'; + +jest.mock('../../../api'); + +const credentialTypeData = { + id: 42, + name: 'Foo', + description: 'New credential', + kind: 'cloud', + inputs: JSON.stringify({ + fields: [ + { + id: 'username', + type: 'string', + label: 'Jenkins username', + }, + { + id: 'password', + type: 'string', + label: 'Jenkins password', + secret: true, + }, + ], + required: ['username', 'password'], + }), + injectors: JSON.stringify({ + extra_vars: { + Jenkins_password: '{{ password }}', + Jenkins_username: '{{ username }}', + }, + }), + summary_fields: { + created_by: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + modified_by: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + user_capabilities: { + edit: true, + delete: true, + }, + }, + created: '2020-06-25T16:52:36.127008Z', + modified: '2020-06-25T16:52:36.127022Z', +}; + +const updateCredentialTypeData = { + name: 'Bar', + description: 'Updated new Credential Type', + injectors: credentialTypeData.injectors, + inputs: credentialTypeData.inputs, +}; + +describe('', () => { + let wrapper; + let history; + + beforeAll(async () => { + history = createMemoryHistory(); + await act(async () => { + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); + }); + }); + + afterAll(() => { + jest.clearAllMocks(); + wrapper.unmount(); + }); + + test('handleSubmit should call the api and redirect to details page', async () => { + await act(async () => { + wrapper.find('CredentialTypeForm').invoke('onSubmit')( + updateCredentialTypeData + ); + wrapper.update(); + expect(CredentialTypesAPI.update).toHaveBeenCalledWith(42, { + ...updateCredentialTypeData, + injectors: JSON.parse(credentialTypeData.injectors), + inputs: JSON.parse(credentialTypeData.inputs), + }); + }); + }); + + test('should navigate to credential types detail when cancel is clicked', async () => { + await act(async () => { + wrapper.find('button[aria-label="Cancel"]').prop('onClick')(); + }); + expect(history.location.pathname).toEqual('/credential_types/42/details'); + }); + + test('should navigate to credential type detail after successful submission', async () => { + await act(async () => { + wrapper.find('CredentialTypeForm').invoke('onSubmit')({ + ...updateCredentialTypeData, + injectors: JSON.parse(credentialTypeData.injectors), + inputs: 
JSON.parse(credentialTypeData.inputs), + }); + }); + wrapper.update(); + expect(wrapper.find('FormSubmitError').length).toBe(0); + expect(history.location.pathname).toEqual('/credential_types/42/details'); + }); + + test('failed form submission should show an error message', async () => { + const error = { + response: { + data: { detail: 'An error occurred' }, + }, + }; + CredentialTypesAPI.update.mockImplementationOnce(() => + Promise.reject(error) + ); + await act(async () => { + wrapper.find('CredentialTypeForm').invoke('onSubmit')( + updateCredentialTypeData + ); + }); + wrapper.update(); + expect(wrapper.find('FormSubmitError').length).toBe(1); + }); +}); diff --git a/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx index 7375a61b63..c19391f0c0 100644 --- a/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx +++ b/awx/ui_next/src/screens/CredentialType/CredentialTypeList/CredentialTypeList.jsx @@ -131,7 +131,7 @@ function CredentialTypeList({ i18n }) { )} renderItem={credentialType => ( @@ -65,8 +67,12 @@ function CredentialTypeForm({ const initialValues = { name: credentialType.name || '', description: credentialType.description || '', - inputs: credentialType.inputs || '---', - injectors: credentialType.injectors || '---', + inputs: credentialType.inputs + ? jsonToYaml(JSON.stringify(credentialType.inputs)) + : '---', + injectors: credentialType.injectors + ? jsonToYaml(JSON.stringify(credentialType.injectors)) + : '---', }; return ( onSubmit(values)}> @@ -74,7 +80,7 @@ function CredentialTypeForm({
- + {submitError && } Date: Tue, 7 Jul 2020 13:00:05 -0400 Subject: [PATCH 304/494] Update to ansible/angular-scheduler#v0.4.4 --- awx/ui/package-lock.json | 60 ++++++++++++++++++++++++++-------------- awx/ui/package.json | 2 +- 2 files changed, 40 insertions(+), 22 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index 27bdc044cc..24b7837312 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -241,11 +241,11 @@ "integrity": "sha512-nB/xe7JQWF9nLvhHommAICQ3eWrfRETo0EVGFESi952CDzDa+GAJ/2BFBNw44QqQPxj1Xua/uYKrbLsOGWZdbQ==" }, "angular-scheduler": { - "version": "git+https://git@github.com/ansible/angular-scheduler.git#6a2d33b06b1143e7449c4427f222fd05559f3a23", - "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.3", + "version": "git+https://git@github.com/ansible/angular-scheduler.git#d72b62f47fb5c11b3284eaaea11c4d5525fa3b99", + "from": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.4", "requires": { "angular": "^1.7.9", - "angular-tz-extensions": "github:ansible/angular-tz-extensions#5c594b5756d29637601020bba16274f10ee0ed65", + "angular-tz-extensions": "github:ansible/angular-tz-extensions", "jquery": "^3.5.1", "jquery-ui": "*", "lodash": "^4.17.15", @@ -261,14 +261,9 @@ "angular-filters": "^1.1.2", "jquery": "^3.5.1", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" + "timezone-js": "github:ansible/timezone-js#0.4.14" } }, - "jquery": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", - "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" - }, "rrule": { "version": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c", "from": "github:jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c" @@ -283,13 +278,17 @@ "angular-filters": "^1.1.2", "jquery": "^3.5.1", "jstimezonedetect": "1.0.5", - "timezone-js": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f" + "timezone-js": "github:ansible/timezone-js#0.4.14" }, "dependencies": { "jquery": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz", "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg==" + }, + "timezone-js": { + "version": "github:ansible/timezone-js#6937de14ce0c193961538bb5b3b12b7ef62a358f", + "from": "github:ansible/timezone-js#0.4.14" } } }, @@ -1803,6 +1802,7 @@ "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", "integrity": "sha1-OciRjO/1eZ+D+UkqhI9iWt0Mdm8=", "dev": true, + "optional": true, "requires": { "hoek": "2.x.x" } @@ -5374,7 +5374,8 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true + "dev": true, + "optional": true }, "aproba": { "version": "1.2.0", @@ -5398,13 +5399,15 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true + "dev": true, + "optional": true }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5414,19 +5417,22 
@@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true + "dev": true, + "optional": true }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true + "dev": true, + "optional": true }, "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true + "dev": true, + "optional": true }, "core-util-is": { "version": "1.0.2", @@ -5547,7 +5553,8 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true + "dev": true, + "optional": true }, "ini": { "version": "1.3.5", @@ -5561,6 +5568,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", "dev": true, + "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -5577,6 +5585,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dev": true, + "optional": true, "requires": { "brace-expansion": "^1.1.7" } @@ -5682,7 +5691,8 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true + "dev": true, + "optional": true }, "object-assign": { "version": "4.1.1", @@ -5696,6 +5706,7 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, + "optional": true, "requires": { "wrappy": "1" } @@ -5791,7 +5802,8 @@ "version": "5.1.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==", - "dev": true + "dev": true, + "optional": true }, "safer-buffer": { "version": "2.1.2", @@ -5833,6 +5845,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "dev": true, + "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -5854,6 +5867,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, + "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -5886,7 +5900,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true + "dev": true, + "optional": true } } }, @@ -6633,7 +6648,8 @@ "version": "2.16.3", "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", "integrity": "sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0=", - "dev": true + "dev": true, + "optional": true }, "home-or-tmp": { "version": "2.0.0", @@ -9131,6 +9147,7 @@ "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", "dev": true, + "optional": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -9140,7 +9157,8 @@ "version": "3.0.3", 
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", - "dev": true + "dev": true, + "optional": true } } }, diff --git a/awx/ui/package.json b/awx/ui/package.json index 1f6b603665..59cb964b5c 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -107,7 +107,7 @@ "angular-moment": "^1.3.0", "angular-mousewheel": "^1.0.5", "angular-sanitize": "^1.7.9", - "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.3", + "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler.git#v0.4.4", "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions.git#v0.6.1", "angular-xeditable": "~0.8.0", "ansi-to-html": "^0.6.3", From 15ae0976ddb80f9af273de7e7a1684b7ab45d973 Mon Sep 17 00:00:00 2001 From: Keith Grant Date: Tue, 7 Jul 2020 16:36:59 -0700 Subject: [PATCH 305/494] add sync status indicator to inventory list --- .../SyncStatusIndicator.jsx | 46 +++++++++ .../components/SyncStatusIndicator/index.js | 1 + .../Inventory/InventoryList/InventoryList.jsx | 11 ++- .../InventoryList/InventoryListItem.jsx | 15 ++- .../InventoryList/useWsInventories.js | 95 +++++++++++++++++++ 5 files changed, 160 insertions(+), 8 deletions(-) create mode 100644 awx/ui_next/src/components/SyncStatusIndicator/SyncStatusIndicator.jsx create mode 100644 awx/ui_next/src/components/SyncStatusIndicator/index.js create mode 100644 awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js diff --git a/awx/ui_next/src/components/SyncStatusIndicator/SyncStatusIndicator.jsx b/awx/ui_next/src/components/SyncStatusIndicator/SyncStatusIndicator.jsx new file mode 100644 index 0000000000..77cb158548 --- /dev/null +++ b/awx/ui_next/src/components/SyncStatusIndicator/SyncStatusIndicator.jsx @@ -0,0 +1,46 @@ +import React from 'react'; +import 'styled-components/macro'; +import styled, { keyframes } from 'styled-components'; +import { oneOf, string } from 'prop-types'; +import { CloudIcon } from '@patternfly/react-icons'; + +const COLORS = { + success: '--pf-global--palette--green-400', + syncing: '--pf-global--palette--green-400', + error: '--pf-global--danger-color--100', + disabled: '--pf-global--disabled-color--200', +}; + +const Pulse = keyframes` + from { + opacity: 0; + } + to { + opacity: 1.0; + } +`; + +const PulseWrapper = styled.div` + animation: ${Pulse} 1.5s linear infinite alternate; +`; + +export default function SyncStatusIndicator({ status, title }) { + const color = COLORS[status] || COLORS.disabled; + + if (status === 'syncing') { + return ( + + + + ); + } + + return ; +} +SyncStatusIndicator.propTypes = { + status: oneOf(['success', 'error', 'disabled', 'syncing']).isRequired, + title: string, +}; +SyncStatusIndicator.defaultProps = { + title: null, +}; diff --git a/awx/ui_next/src/components/SyncStatusIndicator/index.js b/awx/ui_next/src/components/SyncStatusIndicator/index.js new file mode 100644 index 0000000000..8a25d03365 --- /dev/null +++ b/awx/ui_next/src/components/SyncStatusIndicator/index.js @@ -0,0 +1 @@ +export { default } from './SyncStatusIndicator'; diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx index dcb817fe59..e0aa888f97 100644 --- a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx +++ b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx @@ -1,7 +1,6 @@ import 
React, { useState, useCallback, useEffect } from 'react'; import { useLocation, useRouteMatch } from 'react-router-dom'; import { withI18n } from '@lingui/react'; - import { t } from '@lingui/macro'; import { Card, PageSection } from '@patternfly/react-core'; @@ -13,8 +12,8 @@ import ErrorDetail from '../../../components/ErrorDetail'; import PaginatedDataList, { ToolbarDeleteButton, } from '../../../components/PaginatedDataList'; - import { getQSConfig, parseQueryString } from '../../../util/qs'; +import useWsInventories from './useWsInventories'; import AddDropDownButton from '../../../components/AddDropDownButton'; import InventoryListItem from './InventoryListItem'; @@ -30,7 +29,7 @@ function InventoryList({ i18n }) { const [selected, setSelected] = useState([]); const { - result: { inventories, itemCount, actions }, + result: { results, itemCount, actions }, error: contentError, isLoading, request: fetchInventories, @@ -42,13 +41,13 @@ function InventoryList({ i18n }) { InventoriesAPI.readOptions(), ]); return { - inventories: response.data.results, + results: response.data.results, itemCount: response.data.count, actions: actionsResponse.data.actions, }; }, [location]), { - inventories: [], + results: [], itemCount: 0, actions: {}, } @@ -58,6 +57,8 @@ function InventoryList({ i18n }) { fetchInventories(); }, [fetchInventories]); + const inventories = useWsInventories(results); + const isAllSelected = selected.length === inventories.length && selected.length > 0; const { diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx index 9d272ace69..eb091495c7 100644 --- a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx +++ b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx @@ -10,16 +10,16 @@ import { DataListItemRow, Tooltip, } from '@patternfly/react-core'; - +import { PencilAltIcon } from '@patternfly/react-icons'; import { t } from '@lingui/macro'; import { Link } from 'react-router-dom'; import styled from 'styled-components'; -import { PencilAltIcon } from '@patternfly/react-icons'; import { timeOfDay } from '../../../util/dates'; import { InventoriesAPI } from '../../../api'; import { Inventory } from '../../../types'; import DataListCell from '../../../components/DataListCell'; import CopyButton from '../../../components/CopyButton'; +import SyncStatusIndicator from '../../../components/SyncStatusIndicator'; const DataListAction = styled(_DataListAction)` align-items: center; @@ -52,6 +52,12 @@ function InventoryListItem({ }, [inventory.id, inventory.name, fetchInventories]); const labelId = `check-action-${inventory.id}`; + + let syncStatus = 'disabled'; + if (inventory.has_inventory_sources) { + syncStatus = + inventory.inventory_sources_with_failures > 0 ? 
'error' : 'success'; + } return ( + + + , + {inventory.name} diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js b/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js new file mode 100644 index 0000000000..29d438d3bf --- /dev/null +++ b/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js @@ -0,0 +1,95 @@ +import { useState, useEffect, useRef } from 'react'; + +export default function useWsProjects(initialInventories) { + const [inventories, setInventories] = useState(initialInventories); + const [lastMessage, setLastMessage] = useState(null); + const ws = useRef(null); + + useEffect(() => { + setInventories(initialInventories); + }, [initialInventories]); + + // const messageExample = { + // unified_job_id: 533, + // status: 'pending', + // type: 'inventory_update', + // inventory_source_id: 53, + // inventory_id: 5, + // group_name: 'jobs', + // unified_job_template_id: 53, + // }; + useEffect(() => { + if (!lastMessage?.unified_job_id || lastMessage.type !== 'project_update') { + return; + } + const index = inventories.findIndex(p => p.id === lastMessage.project_id); + if (index === -1) { + return; + } + + const inventory = inventories[index]; + const updatedProject = { + ...inventory, + summary_fields: { + ...inventory.summary_fields, + // last_job: { + // id: lastMessage.unified_job_id, + // status: lastMessage.status, + // finished: lastMessage.finished, + // }, + }, + }; + setInventories([ + ...inventories.slice(0, index), + updatedProject, + ...inventories.slice(index + 1), + ]); + }, [lastMessage]); // eslint-disable-line react-hooks/exhaustive-deps + + useEffect(() => { + ws.current = new WebSocket(`wss://${window.location.host}/websocket/`); + + const connect = () => { + const xrftoken = `; ${document.cookie}` + .split('; csrftoken=') + .pop() + .split(';') + .shift(); + ws.current.send( + JSON.stringify({ + xrftoken, + groups: { + inventories: ['status_changed'], + jobs: ['status_changed'], + control: ['limit_reached_1'], + }, + }) + ); + }; + ws.current.onopen = connect; + + ws.current.onmessage = e => { + setLastMessage(JSON.parse(e.data)); + }; + + ws.current.onclose = e => { + // eslint-disable-next-line no-console + console.debug('Socket closed. 
Reconnecting...', e); + setTimeout(() => { + connect(); + }, 1000); + }; + + ws.current.onerror = err => { + // eslint-disable-next-line no-console + console.debug('Socket error: ', err, 'Disconnecting...'); + ws.current.close(); + }; + + return () => { + ws.current.close(); + }; + }, []); + + return inventories; +} From 319308193c981238b07730ca9041298fbb6a6991 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Wed, 8 Jul 2020 09:17:55 -0400 Subject: [PATCH 306/494] show user capabilities for instance groups --- awx/api/serializers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 34924e30ea..fc1bf50947 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -4663,6 +4663,8 @@ class InstanceSerializer(BaseSerializer): class InstanceGroupSerializer(BaseSerializer): + show_capabilities = ['edit', 'delete'] + committed_capacity = serializers.SerializerMethodField() consumed_capacity = serializers.SerializerMethodField() percent_capacity_remaining = serializers.SerializerMethodField() From 56d800e8514e6aa67e116f2e41c11bc33c1dd116 Mon Sep 17 00:00:00 2001 From: Caleb Boylan Date: Wed, 8 Jul 2020 09:27:46 -0700 Subject: [PATCH 307/494] Raise job length for testing job wait --- .../tests/integration/targets/tower_job_wait/tasks/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/awx_collection/tests/integration/targets/tower_job_wait/tasks/main.yml b/awx_collection/tests/integration/targets/tower_job_wait/tasks/main.yml index a3e1338e02..b04fa62ff8 100644 --- a/awx_collection/tests/integration/targets/tower_job_wait/tasks/main.yml +++ b/awx_collection/tests/integration/targets/tower_job_wait/tasks/main.yml @@ -19,6 +19,8 @@ job_type: run project: "{{ proj_name }}" inventory: "Demo Inventory" + extra_vars: + sleep_interval: 300 - name: Check deprecation warnings tower_job_wait: From bbc4522063b06bb17b03bc8b0b52746fef66908d Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Mon, 6 Jul 2020 11:23:37 -0400 Subject: [PATCH 308/494] Adds User TokenAdd Functionality --- awx/ui_next/src/api/index.js | 3 + awx/ui_next/src/api/models/Tokens.js | 10 ++ awx/ui_next/src/api/models/Users.js | 4 + .../components/Lookup/ApplicationLookup.jsx | 106 +++++++++++++ .../Lookup/ApplicationLookup.test.jsx | 80 ++++++++++ awx/ui_next/src/components/Lookup/index.js | 1 + awx/ui_next/src/screens/User/User.jsx | 6 +- .../User/UserTokenAdd/UserTokenAdd.jsx | 42 +++++ .../User/UserTokenAdd/UserTokenAdd.test.jsx | 98 ++++++++++++ .../src/screens/User/UserTokenAdd/index.js | 1 + .../User/UserTokenList/UserTokenListItem.jsx | 2 +- .../UserTokenList/UserTokenListItem.test.jsx | 2 +- .../screens/User/UserTokens/UserTokens.jsx | 22 +++ .../src/screens/User/UserTokens/index.js | 1 + awx/ui_next/src/screens/User/Users.jsx | 1 + .../src/screens/User/shared/UserTokenForm.jsx | 127 +++++++++++++++ .../User/shared/UserTokenForm.test.jsx | 144 ++++++++++++++++++ awx/ui_next/src/screens/User/shared/index.js | 1 + 18 files changed, 646 insertions(+), 5 deletions(-) create mode 100644 awx/ui_next/src/api/models/Tokens.js create mode 100644 awx/ui_next/src/components/Lookup/ApplicationLookup.jsx create mode 100644 awx/ui_next/src/components/Lookup/ApplicationLookup.test.jsx create mode 100644 awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.jsx create mode 100644 awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.test.jsx create mode 100644 awx/ui_next/src/screens/User/UserTokenAdd/index.js create mode 100644 
awx/ui_next/src/screens/User/UserTokens/UserTokens.jsx create mode 100644 awx/ui_next/src/screens/User/UserTokens/index.js create mode 100644 awx/ui_next/src/screens/User/shared/UserTokenForm.jsx create mode 100644 awx/ui_next/src/screens/User/shared/UserTokenForm.test.jsx diff --git a/awx/ui_next/src/api/index.js b/awx/ui_next/src/api/index.js index 2de6a235e0..c3cfc1167f 100644 --- a/awx/ui_next/src/api/index.js +++ b/awx/ui_next/src/api/index.js @@ -24,6 +24,7 @@ import Roles from './models/Roles'; import Schedules from './models/Schedules'; import SystemJobs from './models/SystemJobs'; import Teams from './models/Teams'; +import Tokens from './models/Tokens'; import UnifiedJobTemplates from './models/UnifiedJobTemplates'; import UnifiedJobs from './models/UnifiedJobs'; import Users from './models/Users'; @@ -58,6 +59,7 @@ const RolesAPI = new Roles(); const SchedulesAPI = new Schedules(); const SystemJobsAPI = new SystemJobs(); const TeamsAPI = new Teams(); +const TokensAPI = new Tokens(); const UnifiedJobTemplatesAPI = new UnifiedJobTemplates(); const UnifiedJobsAPI = new UnifiedJobs(); const UsersAPI = new Users(); @@ -93,6 +95,7 @@ export { SchedulesAPI, SystemJobsAPI, TeamsAPI, + TokensAPI, UnifiedJobTemplatesAPI, UnifiedJobsAPI, UsersAPI, diff --git a/awx/ui_next/src/api/models/Tokens.js b/awx/ui_next/src/api/models/Tokens.js new file mode 100644 index 0000000000..5dd490808d --- /dev/null +++ b/awx/ui_next/src/api/models/Tokens.js @@ -0,0 +1,10 @@ +import Base from '../Base'; + +class Tokens extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/tokens/'; + } +} + +export default Tokens; diff --git a/awx/ui_next/src/api/models/Users.js b/awx/ui_next/src/api/models/Users.js index 3d4ec4aac9..97c7a6976c 100644 --- a/awx/ui_next/src/api/models/Users.js +++ b/awx/ui_next/src/api/models/Users.js @@ -12,6 +12,10 @@ class Users extends Base { }); } + createToken(userId, data) { + return this.http.post(`${this.baseUrl}${userId}/authorized_tokens/`, data); + } + disassociateRole(userId, roleId) { return this.http.post(`${this.baseUrl}${userId}/roles/`, { id: roleId, diff --git a/awx/ui_next/src/components/Lookup/ApplicationLookup.jsx b/awx/ui_next/src/components/Lookup/ApplicationLookup.jsx new file mode 100644 index 0000000000..2d43c0491b --- /dev/null +++ b/awx/ui_next/src/components/Lookup/ApplicationLookup.jsx @@ -0,0 +1,106 @@ +import React, { useCallback, useEffect } from 'react'; +import { func, node } from 'prop-types'; +import { withRouter, useLocation } from 'react-router-dom'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { FormGroup } from '@patternfly/react-core'; +import { ApplicationsAPI } from '../../api'; +import { Application } from '../../types'; +import { getQSConfig, parseQueryString } from '../../util/qs'; +import Lookup from './Lookup'; +import OptionsList from '../OptionsList'; +import useRequest from '../../util/useRequest'; +import LookupErrorMessage from './shared/LookupErrorMessage'; + +const QS_CONFIG = getQSConfig('applications', { + page: 1, + page_size: 5, + order_by: 'name', +}); + +function ApplicationLookup({ i18n, onChange, value, label }) { + const location = useLocation(); + const { + error, + result: { applications, itemCount }, + request: fetchApplications, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + + const { + data: { results, count }, + } = await ApplicationsAPI.read(params); + return { applications: results, 
itemCount: count }; + }, [location]), + { applications: [], itemCount: 0 } + ); + useEffect(() => { + fetchApplications(); + }, [fetchApplications]); + return ( + + ( + dispatch({ type: 'SELECT_ITEM', item })} + deselectItem={item => dispatch({ type: 'DESELECT_ITEM', item })} + /> + )} + /> + + + ); +} +ApplicationLookup.propTypes = { + label: node.isRequired, + onChange: func.isRequired, + value: Application, +}; + +ApplicationLookup.defaultProps = { + value: null, +}; + +export default withI18n()(withRouter(ApplicationLookup)); diff --git a/awx/ui_next/src/components/Lookup/ApplicationLookup.test.jsx b/awx/ui_next/src/components/Lookup/ApplicationLookup.test.jsx new file mode 100644 index 0000000000..5d2e2e33a0 --- /dev/null +++ b/awx/ui_next/src/components/Lookup/ApplicationLookup.test.jsx @@ -0,0 +1,80 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { mountWithContexts } from '../../../testUtils/enzymeHelpers'; +import ApplicationLookup from './ApplicationLookup'; +import { ApplicationsAPI } from '../../api'; + +jest.mock('../../api'); +const application = { + id: 1, + name: 'app', + description: '', +}; + +const fetchedApplications = { + count: 2, + results: [ + { + id: 1, + name: 'app', + description: '', + }, + { + id: 4, + name: 'application that should not crach', + description: '', + }, + ], +}; +describe('ApplicationLookup', () => { + let wrapper; + + beforeEach(() => { + ApplicationsAPI.read.mockResolvedValueOnce(fetchedApplications); + }); + + afterEach(() => { + jest.clearAllMocks(); + wrapper.unmount(); + }); + + test('should render successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('ApplicationLookup')).toHaveLength(1); + }); + + test('should fetch applications', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(ApplicationsAPI.read).toHaveBeenCalledTimes(1); + }); + + test('should display label', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + const title = wrapper.find('FormGroup .pf-c-form__label-text'); + expect(title.text()).toEqual('Application'); + }); +}); diff --git a/awx/ui_next/src/components/Lookup/index.js b/awx/ui_next/src/components/Lookup/index.js index 9321fb08e9..fb99cd5681 100644 --- a/awx/ui_next/src/components/Lookup/index.js +++ b/awx/ui_next/src/components/Lookup/index.js @@ -4,3 +4,4 @@ export { default as InventoryLookup } from './InventoryLookup'; export { default as ProjectLookup } from './ProjectLookup'; export { default as MultiCredentialsLookup } from './MultiCredentialsLookup'; export { default as CredentialLookup } from './CredentialLookup'; +export { default as ApplicationLookup } from './ApplicationLookup'; diff --git a/awx/ui_next/src/screens/User/User.jsx b/awx/ui_next/src/screens/User/User.jsx index af60199e69..282a193ab8 100644 --- a/awx/ui_next/src/screens/User/User.jsx +++ b/awx/ui_next/src/screens/User/User.jsx @@ -20,7 +20,7 @@ import UserDetail from './UserDetail'; import UserEdit from './UserEdit'; import UserOrganizations from './UserOrganizations'; import UserTeams from './UserTeams'; -import UserTokenList from './UserTokenList'; +import UserTokens from './UserTokens'; import UserAccessList from './UserAccess/UserAccessList'; function User({ i18n, setBreadcrumb, me }) { @@ -80,7 +80,7 @@ function User({ i18n, setBreadcrumb, me }) { } let showCardHeader = true; - if (['edit'].some(name => 
location.pathname.includes(name))) { + if (['edit', 'add'].some(name => location.pathname.includes(name))) { showCardHeader = false; } @@ -131,7 +131,7 @@ function User({ i18n, setBreadcrumb, me }) { )} - + diff --git a/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.jsx b/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.jsx new file mode 100644 index 0000000000..606171c028 --- /dev/null +++ b/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.jsx @@ -0,0 +1,42 @@ +import React, { useCallback } from 'react'; +import { useHistory, useParams } from 'react-router-dom'; + +import { CardBody } from '../../../components/Card'; +import { TokensAPI, UsersAPI } from '../../../api'; +import useRequest from '../../../util/useRequest'; +import UserTokenFrom from '../shared/UserTokenForm'; + +function UserTokenAdd() { + const history = useHistory(); + const { id: userId } = useParams(); + const { error: submitError, request: handleSubmit } = useRequest( + useCallback( + async formData => { + if (formData.application) { + formData.application = formData.application?.id || null; + await UsersAPI.createToken(userId, formData); + } else { + await TokensAPI.create(formData); + } + + history.push(`/users/${userId}/tokens`); + }, + [history, userId] + ) + ); + + const handleCancel = () => { + history.push(`/users/${userId}/tokens`); + }; + + return ( + + + + ); +} +export default UserTokenAdd; diff --git a/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.test.jsx b/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.test.jsx new file mode 100644 index 0000000000..4323663c45 --- /dev/null +++ b/awx/ui_next/src/screens/User/UserTokenAdd/UserTokenAdd.test.jsx @@ -0,0 +1,98 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { createMemoryHistory } from 'history'; +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import UserTokenAdd from './UserTokenAdd'; +import { UsersAPI, TokensAPI } from '../../../api'; + +jest.mock('../../../api'); +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + history: () => ({ + location: '/user', + }), + useParams: () => ({ id: 1 }), +})); +let wrapper; + +describe('', () => { + test('handleSubmit should post to api', async () => { + await act(async () => { + wrapper = mountWithContexts(); + }); + UsersAPI.createToken.mockResolvedValueOnce({ data: { id: 1 } }); + const tokenData = { + application: 1, + description: 'foo', + scope: 'read', + }; + await act(async () => { + wrapper.find('UserTokenForm').prop('handleSubmit')(tokenData); + }); + expect(UsersAPI.createToken).toHaveBeenCalledWith(1, tokenData); + }); + + test('should navigate to tokens list when cancel is clicked', async () => { + const history = createMemoryHistory({}); + await act(async () => { + wrapper = mountWithContexts(, { + context: { router: { history } }, + }); + }); + await act(async () => { + wrapper.find('button[aria-label="Cancel"]').prop('onClick')(); + }); + expect(history.location.pathname).toEqual('/users/1/tokens'); + }); + + test('successful form submission should trigger redirect', async () => { + const history = createMemoryHistory({}); + const tokenData = { + application: 1, + description: 'foo', + scope: 'read', + }; + UsersAPI.createToken.mockResolvedValueOnce({ + data: { + id: 2, + ...tokenData, + }, + }); + await act(async () => { + wrapper = mountWithContexts(, { + context: { router: { history } }, + }); + }); + await waitForElement(wrapper, 
'button[aria-label="Save"]'); + await act(async () => { + wrapper.find('UserTokenForm').prop('handleSubmit')(tokenData); + }); + expect(history.location.pathname).toEqual('/users/1/tokens'); + }); + + test('should successful submit form with application', async () => { + const history = createMemoryHistory({}); + const tokenData = { + scope: 'read', + }; + TokensAPI.create.mockResolvedValueOnce({ + data: { + id: 2, + ...tokenData, + }, + }); + await act(async () => { + wrapper = mountWithContexts(, { + context: { router: { history } }, + }); + }); + await waitForElement(wrapper, 'button[aria-label="Save"]'); + await act(async () => { + wrapper.find('UserTokenForm').prop('handleSubmit')(tokenData); + }); + expect(history.location.pathname).toEqual('/users/1/tokens'); + }); +}); diff --git a/awx/ui_next/src/screens/User/UserTokenAdd/index.js b/awx/ui_next/src/screens/User/UserTokenAdd/index.js new file mode 100644 index 0000000000..d8a9b4a1f7 --- /dev/null +++ b/awx/ui_next/src/screens/User/UserTokenAdd/index.js @@ -0,0 +1 @@ +export { default } from './UserTokenAdd'; diff --git a/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.jsx b/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.jsx index cb5e4057c5..4b1198c5a9 100644 --- a/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.jsx +++ b/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.jsx @@ -40,7 +40,7 @@ function UserTokenListItem({ i18n, token, isSelected, onSelect }) { > {token.summary_fields?.application?.name ? ( - {i18n._(t`Application:`)} + {i18n._(t`Application`)} {token.summary_fields.application.name} ) : ( diff --git a/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.test.jsx b/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.test.jsx index fe009e4b8a..a91e2d1632 100644 --- a/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.test.jsx +++ b/awx/ui_next/src/screens/User/UserTokenList/UserTokenListItem.test.jsx @@ -53,7 +53,7 @@ describe('', () => { expect(wrapper.find('DataListCheck').prop('checked')).toBe(false); expect( wrapper.find('PFDataListCell[aria-label="application name"]').text() - ).toBe('Application:app'); + ).toBe('Applicationapp'); expect(wrapper.find('PFDataListCell[aria-label="scope"]').text()).toBe( 'ScopeRead' ); diff --git a/awx/ui_next/src/screens/User/UserTokens/UserTokens.jsx b/awx/ui_next/src/screens/User/UserTokens/UserTokens.jsx new file mode 100644 index 0000000000..dc072a6546 --- /dev/null +++ b/awx/ui_next/src/screens/User/UserTokens/UserTokens.jsx @@ -0,0 +1,22 @@ +import React from 'react'; +import { withI18n } from '@lingui/react'; +import { Switch, Route, useParams } from 'react-router-dom'; +import UserTokenAdd from '../UserTokenAdd'; +import UserTokenList from '../UserTokenList'; + +function UserTokens() { + const { id: userId } = useParams(); + + return ( + + + + + + + + + ); +} + +export default withI18n()(UserTokens); diff --git a/awx/ui_next/src/screens/User/UserTokens/index.js b/awx/ui_next/src/screens/User/UserTokens/index.js new file mode 100644 index 0000000000..8ea0743daa --- /dev/null +++ b/awx/ui_next/src/screens/User/UserTokens/index.js @@ -0,0 +1 @@ +export { default } from './UserTokens'; diff --git a/awx/ui_next/src/screens/User/Users.jsx b/awx/ui_next/src/screens/User/Users.jsx index 575b997f48..6f21f8be10 100644 --- a/awx/ui_next/src/screens/User/Users.jsx +++ b/awx/ui_next/src/screens/User/Users.jsx @@ -33,6 +33,7 @@ function Users({ i18n }) { [`/users/${user.id}/teams`]: i18n._(t`Teams`), 
[`/users/${user.id}/organizations`]: i18n._(t`Organizations`), [`/users/${user.id}/tokens`]: i18n._(t`Tokens`), + [`/users/${user.id}/tokens/add`]: i18n._(t`Create user token`), }); }, [i18n] diff --git a/awx/ui_next/src/screens/User/shared/UserTokenForm.jsx b/awx/ui_next/src/screens/User/shared/UserTokenForm.jsx new file mode 100644 index 0000000000..bfc5f0f08f --- /dev/null +++ b/awx/ui_next/src/screens/User/shared/UserTokenForm.jsx @@ -0,0 +1,127 @@ +import React from 'react'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Formik, useField } from 'formik'; +import { Form, FormGroup } from '@patternfly/react-core'; +import AnsibleSelect from '../../../components/AnsibleSelect'; +import FormActionGroup from '../../../components/FormActionGroup/FormActionGroup'; +import FormField, { + FormSubmitError, + FieldTooltip, +} from '../../../components/FormField'; +import ApplicationLookup from '../../../components/Lookup/ApplicationLookup'; +import { required } from '../../../util/validators'; + +import { FormColumnLayout } from '../../../components/FormLayout'; + +function UserTokenFormFields({ i18n }) { + const [applicationField, applicationMeta, applicationHelpers] = useField( + 'application' + ); + + const [scopeField, scopeMeta, scopeHelpers] = useField({ + name: 'scope', + validate: required(i18n._(t`Please enter a value.`), i18n), + }); + + return ( + <> + + { + applicationHelpers.setValue(value); + }} + label={ + + {i18n._(t`Application`)} + + + } + touched={applicationMeta.touched} + /> + + + + + } + > + { + scopeHelpers.setValue(value); + }} + /> + + + ); +} + +function UserTokenForm({ + handleCancel, + handleSubmit, + submitError, + i18n, + token = {}, +}) { + return ( + + {formik => ( + + + + {submitError && } + { + formik.handleSubmit(); + }} + /> + + + )} + + ); +} +export default withI18n()(UserTokenForm); diff --git a/awx/ui_next/src/screens/User/shared/UserTokenForm.test.jsx b/awx/ui_next/src/screens/User/shared/UserTokenForm.test.jsx new file mode 100644 index 0000000000..ddfcbd6cb4 --- /dev/null +++ b/awx/ui_next/src/screens/User/shared/UserTokenForm.test.jsx @@ -0,0 +1,144 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import UserTokenForm from './UserTokenForm'; +import { sleep } from '../../../../testUtils/testUtils'; +import { ApplicationsAPI } from '../../../api'; + +jest.mock('../../../api'); +const applications = { + data: { + count: 2, + results: [ + { + id: 1, + name: 'app', + description: '', + }, + { + id: 4, + name: 'application that should not crach', + description: '', + }, + ], + }, +}; +describe('', () => { + let wrapper; + beforeEach(() => {}); + afterEach(() => { + wrapper.unmount(); + jest.clearAllMocks(); + }); + + test('initially renders successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + + expect(wrapper.find('UserTokenForm').length).toBe(1); + }); + + test('add form displays all form fields', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + expect(wrapper.find('FormGroup[name="application"]').length).toBe(1); + expect(wrapper.find('FormField[name="description"]').length).toBe(1); + expect(wrapper.find('FormGroup[name="scope"]').length).toBe(1); + }); + + test('inputs should update form value on change', async () => { + await 
act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + wrapper.update(); + await act(async () => { + wrapper.find('ApplicationLookup').invoke('onChange')({ + id: 1, + name: 'application', + }); + wrapper.find('input[name="description"]').simulate('change', { + target: { value: 'new Bar', name: 'description' }, + }); + wrapper.find('AnsibleSelect[name="scope"]').prop('onChange')({}, 'read'); + }); + wrapper.update(); + expect(wrapper.find('ApplicationLookup').prop('value')).toEqual({ + id: 1, + name: 'application', + }); + expect(wrapper.find('input[name="description"]').prop('value')).toBe( + 'new Bar' + ); + expect(wrapper.find('AnsibleSelect#token-scope').prop('value')).toBe( + 'read' + ); + }); + + test('should call handleSubmit when Submit button is clicked', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + const handleSubmit = jest.fn(); + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + + await act(async () => { + wrapper.find('AnsibleSelect[name="scope"]').prop('onChange')({}, 'read'); + }); + wrapper.update(); + await act(async () => { + wrapper.find('button[aria-label="Save"]').prop('onClick')(); + }); + await sleep(1); + + expect(handleSubmit).toBeCalled(); + }); + + test('should call handleCancel when Cancel button is clicked', async () => { + const handleCancel = jest.fn(); + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + expect(handleCancel).not.toHaveBeenCalled(); + wrapper.find('button[aria-label="Cancel"]').invoke('onClick')(); + expect(handleCancel).toBeCalled(); + }); + test('should throw error on submit without scope value', async () => { + ApplicationsAPI.read.mockResolvedValue(applications); + const handleSubmit = jest.fn(); + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + + await act(async () => { + wrapper.find('button[aria-label="Save"]').prop('onClick')(); + }); + await sleep(1); + wrapper.update(); + expect( + wrapper.find('FormGroup[name="scope"]').prop('helperTextInvalid') + ).toBe('Please enter a value.'); + expect(handleSubmit).not.toBeCalled(); + }); +}); diff --git a/awx/ui_next/src/screens/User/shared/index.js b/awx/ui_next/src/screens/User/shared/index.js index ee4362b5c2..4a93f427bd 100644 --- a/awx/ui_next/src/screens/User/shared/index.js +++ b/awx/ui_next/src/screens/User/shared/index.js @@ -1,2 +1,3 @@ /* eslint-disable-next-line import/prefer-default-export */ export { default as UserForm } from './UserForm'; +export { default as UserTokenForm } from './UserTokenForm'; From a9d4046ec5f0730dbf2488d021d238bb3f56e42c Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Tue, 30 Jun 2020 17:55:14 -0400 Subject: [PATCH 309/494] Adds Application Token List with delete functionality --- awx/ui_next/src/api/index.js | 3 + awx/ui_next/src/api/models/Applications.js | 6 + awx/ui_next/src/api/models/Tokens.js | 10 + .../Application/Application/Application.jsx | 10 +- .../ApplicationTokenList.jsx | 163 +++++++++++++++ .../ApplicationTokenList.test.jsx | 193 ++++++++++++++++++ .../ApplicationTokenListItem.jsx | 69 +++++++ .../ApplicationTokenListItem.test.jsx | 90 ++++++++ .../Application/ApplicationTokens/index.js | 1 + awx/ui_next/src/types.js | 7 + 10 files changed, 547 
insertions(+), 5 deletions(-) create mode 100644 awx/ui_next/src/api/models/Tokens.js create mode 100644 awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.test.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.test.jsx create mode 100644 awx/ui_next/src/screens/Application/ApplicationTokens/index.js diff --git a/awx/ui_next/src/api/index.js b/awx/ui_next/src/api/index.js index 2de6a235e0..c3cfc1167f 100644 --- a/awx/ui_next/src/api/index.js +++ b/awx/ui_next/src/api/index.js @@ -24,6 +24,7 @@ import Roles from './models/Roles'; import Schedules from './models/Schedules'; import SystemJobs from './models/SystemJobs'; import Teams from './models/Teams'; +import Tokens from './models/Tokens'; import UnifiedJobTemplates from './models/UnifiedJobTemplates'; import UnifiedJobs from './models/UnifiedJobs'; import Users from './models/Users'; @@ -58,6 +59,7 @@ const RolesAPI = new Roles(); const SchedulesAPI = new Schedules(); const SystemJobsAPI = new SystemJobs(); const TeamsAPI = new Teams(); +const TokensAPI = new Tokens(); const UnifiedJobTemplatesAPI = new UnifiedJobTemplates(); const UnifiedJobsAPI = new UnifiedJobs(); const UsersAPI = new Users(); @@ -93,6 +95,7 @@ export { SchedulesAPI, SystemJobsAPI, TeamsAPI, + TokensAPI, UnifiedJobTemplatesAPI, UnifiedJobsAPI, UsersAPI, diff --git a/awx/ui_next/src/api/models/Applications.js b/awx/ui_next/src/api/models/Applications.js index 50b709bdca..51aaeaa2a1 100644 --- a/awx/ui_next/src/api/models/Applications.js +++ b/awx/ui_next/src/api/models/Applications.js @@ -5,6 +5,12 @@ class Applications extends Base { super(http); this.baseUrl = '/api/v2/applications/'; } + + readTokens(appId, params) { + return this.http.get(`${this.baseUrl}${appId}/tokens/`, { + params, + }); + } } export default Applications; diff --git a/awx/ui_next/src/api/models/Tokens.js b/awx/ui_next/src/api/models/Tokens.js new file mode 100644 index 0000000000..5dd490808d --- /dev/null +++ b/awx/ui_next/src/api/models/Tokens.js @@ -0,0 +1,10 @@ +import Base from '../Base'; + +class Tokens extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/tokens/'; + } +} + +export default Tokens; diff --git a/awx/ui_next/src/screens/Application/Application/Application.jsx b/awx/ui_next/src/screens/Application/Application/Application.jsx index dabce2167f..8764f7d2ca 100644 --- a/awx/ui_next/src/screens/Application/Application/Application.jsx +++ b/awx/ui_next/src/screens/Application/Application/Application.jsx @@ -15,9 +15,9 @@ import { Card, PageSection } from '@patternfly/react-core'; import useRequest from '../../../util/useRequest'; import { ApplicationsAPI } from '../../../api'; import ContentError from '../../../components/ContentError'; -import ContentLoading from '../../../components/ContentLoading'; import ApplicationEdit from '../ApplicationEdit'; import ApplicationDetails from '../ApplicationDetails'; +import ApplicationTokens from '../ApplicationTokens'; import RoutedTabs from '../../../components/RoutedTabs'; function Application({ setBreadcrumb, i18n }) { @@ -82,6 +82,7 @@ function Application({ setBreadcrumb, i18n }) { if (pathname.endsWith('edit')) { cardHeader = null; } + if (!isLoading && error) { return ( @@ -101,10 +102,6 @@ function Application({ setBreadcrumb, i18n }) { ); 
} - if (isLoading) { - return ; - } - return ( @@ -131,6 +128,9 @@ function Application({ setBreadcrumb, i18n }) { clientTypeOptions={clientTypeOptions} /> + + + )} diff --git a/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.jsx b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.jsx new file mode 100644 index 0000000000..453a387286 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.jsx @@ -0,0 +1,163 @@ +import React, { useCallback, useEffect } from 'react'; +import { useParams, useLocation } from 'react-router-dom'; +import { t } from '@lingui/macro'; +import { withI18n } from '@lingui/react'; +import PaginatedDataList, { + ToolbarDeleteButton, +} from '../../../components/PaginatedDataList'; +import { getQSConfig, parseQueryString } from '../../../util/qs'; +import { TokensAPI, ApplicationsAPI } from '../../../api'; +import ErrorDetail from '../../../components/ErrorDetail'; +import AlertModal from '../../../components/AlertModal'; +import useRequest, { useDeleteItems } from '../../../util/useRequest'; +import useSelected from '../../../util/useSelected'; +import ApplicationTokenListItem from './ApplicationTokenListItem'; +import DatalistToolbar from '../../../components/DataListToolbar'; + +const QS_CONFIG = getQSConfig('applications', { + page: 1, + page_size: 20, + order_by: 'user__username', +}); + +function ApplicationTokenList({ i18n }) { + const { id } = useParams(); + const location = useLocation(); + const { + error, + isLoading, + result: { tokens, itemCount }, + request: fetchTokens, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + const { + data: { results, count }, + } = await ApplicationsAPI.readTokens(id, params); + const modifiedResults = results.map(result => { + result.summary_fields = { + user: result.summary_fields.user, + application: result.summary_fields.application, + user_capabilities: { delete: true }, + }; + result.name = result.summary_fields.user?.username; + return result; + }); + return { tokens: modifiedResults, itemCount: count }; + }, [id, location.search]), + { tokens: [], itemCount: 0 } + ); + + useEffect(() => { + fetchTokens(); + }, [fetchTokens]); + + const { selected, isAllSelected, handleSelect, setSelected } = useSelected( + tokens + ); + const { + isLoading: deleteLoading, + deletionError, + deleteItems: handleDeleteApplications, + clearDeletionError, + } = useDeleteItems( + useCallback(async () => { + await Promise.all( + selected.map(({ id: tokenId }) => TokensAPI.destroy(tokenId)) + ); + }, [selected]), + { + qsConfig: QS_CONFIG, + allItemsSelected: isAllSelected, + fetchItems: fetchTokens, + } + ); + + const handleDelete = async () => { + await handleDeleteApplications(); + setSelected([]); + }; + return ( + <> + ( + + setSelected(isSelected ? 
[...tokens] : []) + } + qsConfig={QS_CONFIG} + additionalControls={[ + , + ]} + /> + )} + renderItem={token => ( + handleSelect(token)} + isSelected={selected.some(row => row.id === token.id)} + /> + )} + /> + + {i18n._(t`Failed to delete one or more tokens.`)} + + + + ); +} + +export default withI18n()(ApplicationTokenList); diff --git a/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.test.jsx b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.test.jsx new file mode 100644 index 0000000000..d4f44824fb --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenList.test.jsx @@ -0,0 +1,193 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import { ApplicationsAPI, TokensAPI } from '../../../api'; +import ApplicationTokenList from './ApplicationTokenList'; + +jest.mock('../../../api/models/Applications'); +jest.mock('../../../api/models/Tokens'); + +const tokens = { + data: { + results: [ + { + id: 2, + type: 'o_auth2_access_token', + url: '/api/v2/tokens/2/', + related: { + user: '/api/v2/users/1/', + application: '/api/v2/applications/3/', + activity_stream: '/api/v2/tokens/2/activity_stream/', + }, + summary_fields: { + user: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + application: { + id: 3, + name: 'hg', + }, + }, + created: '2020-06-23T19:56:38.422053Z', + modified: '2020-06-23T19:56:38.441353Z', + description: 'cdfsg', + user: 1, + token: '************', + refresh_token: '************', + application: 3, + expires: '3019-10-25T19:56:38.395635Z', + scope: 'read', + }, + { + id: 3, + type: 'o_auth2_access_token', + url: '/api/v2/tokens/3/', + related: { + user: '/api/v2/users/1/', + application: '/api/v2/applications/3/', + activity_stream: '/api/v2/tokens/3/activity_stream/', + }, + summary_fields: { + user: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + application: { + id: 3, + name: 'hg', + }, + }, + created: '2020-06-23T19:56:50.536169Z', + modified: '2020-06-23T19:56:50.549521Z', + description: 'fgds', + user: 1, + token: '************', + refresh_token: '************', + application: 3, + expires: '3019-10-25T19:56:50.529306Z', + scope: 'write', + }, + ], + count: 2, + }, +}; +describe('', () => { + let wrapper; + test('should mount properly', async () => { + ApplicationsAPI.readTokens.mockResolvedValue(tokens); + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + }); + test('should have data fetched and render 2 rows', async () => { + ApplicationsAPI.readTokens.mockResolvedValue(tokens); + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + expect(wrapper.find('ApplicationTokenListItem').length).toBe(2); + expect(ApplicationsAPI.readTokens).toBeCalled(); + }); + + test('should delete item successfully', async () => { + ApplicationsAPI.readTokens.mockResolvedValue(tokens); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + + wrapper + .find('input#select-token-2') + .simulate('change', tokens.data.results[0]); + + wrapper.update(); + + expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true); + await act(async () => + 
wrapper.find('Button[aria-label="Delete"]').prop('onClick')() + ); + + wrapper.update(); + + await act(async () => + wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')() + ); + expect(TokensAPI.destroy).toBeCalledWith(tokens.data.results[0].id); + }); + + test('should throw content error', async () => { + ApplicationsAPI.readTokens.mockRejectedValue( + new Error({ + response: { + config: { + method: 'get', + url: '/api/v2/applications/', + }, + data: 'An error occurred', + }, + }) + ); + await act(async () => { + wrapper = mountWithContexts(); + }); + + await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + expect(wrapper.find('ContentError').length).toBe(1); + }); + + test('should render deletion error modal', async () => { + TokensAPI.destroy.mockRejectedValue( + new Error({ + response: { + config: { + method: 'delete', + url: '/api/v2/tokens/', + }, + data: 'An error occurred', + }, + }) + ); + ApplicationsAPI.readTokens.mockResolvedValue(tokens); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + + wrapper.find('input#select-token-2').simulate('change', 'a'); + + wrapper.update(); + + expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true); + await act(async () => + wrapper.find('Button[aria-label="Delete"]').prop('onClick')() + ); + + wrapper.update(); + + await act(async () => + wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')() + ); + wrapper.update(); + expect(wrapper.find('ErrorDetail').length).toBe(1); + }); + + test('should not render add button', async () => { + ApplicationsAPI.readTokens.mockResolvedValue(tokens); + + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0); + expect(wrapper.find('ToolbarAddButton').length).toBe(0); + }); +}); diff --git a/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.jsx b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.jsx new file mode 100644 index 0000000000..142561ea7e --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.jsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { string, bool, func } from 'prop-types'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Link } from 'react-router-dom'; +import { + DataListCheck, + DataListItem, + DataListItemCells, + DataListItemRow, +} from '@patternfly/react-core'; +import styled from 'styled-components'; + +import { Token } from '../../../types'; +import { formatDateString } from '../../../util/dates'; +import { toTitleCase } from '../../../util/strings'; +import DataListCell from '../../../components/DataListCell'; + +const Label = styled.b` + margin-right: 20px; +`; + +function ApplicationTokenListItem({ + token, + isSelected, + onSelect, + detailUrl, + i18n, +}) { + const labelId = `check-action-${token.id}`; + return ( + + + + + + {token.summary_fields.user.username} + + , + + + {toTitleCase(token.scope)} + , + + + {formatDateString(token.expires)} + , + ]} + /> + + + ); +} + +ApplicationTokenListItem.propTypes = { + token: Token.isRequired, + detailUrl: string.isRequired, + isSelected: bool.isRequired, + onSelect: func.isRequired, +}; + +export default withI18n()(ApplicationTokenListItem); diff --git a/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.test.jsx 
b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.test.jsx new file mode 100644 index 0000000000..94d0355951 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationTokens/ApplicationTokenListItem.test.jsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; + +import ApplicationTokenListItem from './ApplicationTokenListItem'; + +describe('', () => { + let wrapper; + const token = { + id: 2, + type: 'o_auth2_access_token', + url: '/api/v2/tokens/2/', + related: { + user: '/api/v2/users/1/', + application: '/api/v2/applications/3/', + activity_stream: '/api/v2/tokens/2/activity_stream/', + }, + summary_fields: { + user: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + application: { + id: 3, + name: 'hg', + }, + }, + created: '2020-06-23T19:56:38.422053Z', + modified: '2020-06-23T19:56:38.441353Z', + description: 'cdfsg', + user: 1, + token: '************', + refresh_token: '************', + application: 3, + expires: '3019-10-25T19:56:38.395635Z', + scope: 'read', + }; + + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('ApplicationTokenListItem').length).toBe(1); + }); + test('should render the proper data', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('DataListCell[aria-label="token name"]').text()).toBe( + 'admin' + ); + expect(wrapper.find('DataListCell[aria-label="scope"]').text()).toBe( + 'ScopeRead' + ); + expect(wrapper.find('DataListCell[aria-label="expiration"]').text()).toBe( + 'Expiration10/25/3019, 7:56:38 PM' + ); + expect(wrapper.find('input#select-token-2').prop('checked')).toBe(false); + }); + test('should be checked', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true); + }); +}); diff --git a/awx/ui_next/src/screens/Application/ApplicationTokens/index.js b/awx/ui_next/src/screens/Application/ApplicationTokens/index.js new file mode 100644 index 0000000000..34dd462061 --- /dev/null +++ b/awx/ui_next/src/screens/Application/ApplicationTokens/index.js @@ -0,0 +1 @@ +export { default } from './ApplicationTokenList'; diff --git a/awx/ui_next/src/types.js b/awx/ui_next/src/types.js index 4ba0808d7d..7a66ae3c68 100644 --- a/awx/ui_next/src/types.js +++ b/awx/ui_next/src/types.js @@ -234,6 +234,13 @@ export const Team = shape({ organization: number, }); +export const Token = shape({ + id: number.isRequired, + expires: string.isRequired, + summary_fields: shape({}), + scope: string.isRequired, +}); + export const User = shape({ id: number.isRequired, type: oneOf(['user']), From 2bdd83e0291356168ea8a96d6d89b56a34c9bb7a Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Mon, 6 Jul 2020 13:50:33 -0400 Subject: [PATCH 310/494] use jinja2.sandbox for credential type injectors --- awx/main/models/credential/__init__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 6ba5df45b5..36bb2684ea 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -11,7 +11,7 @@ import tempfile from types import SimpleNamespace # Jinja2 -from jinja2 import Template +from jinja2 import 
sandbox # Django from django.db import models @@ -514,8 +514,11 @@ class CredentialType(CommonModelNameNotUnique): # If any file templates are provided, render the files and update the # special `tower` template namespace so the filename can be # referenced in other injectors + + sandbox_env = sandbox.ImmutableSandboxedEnvironment() + for file_label, file_tmpl in file_tmpls.items(): - data = Template(file_tmpl).render(**namespace) + data = sandbox_env.from_string(file_tmpl).render(**namespace) _, path = tempfile.mkstemp(dir=private_data_dir) with open(path, 'w') as f: f.write(data) @@ -537,14 +540,14 @@ class CredentialType(CommonModelNameNotUnique): except ValidationError as e: logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e)) continue - env[env_var] = Template(tmpl).render(**namespace) - safe_env[env_var] = Template(tmpl).render(**safe_namespace) + env[env_var] = sandbox_env.from_string(tmpl).render(**namespace) + safe_env[env_var] = sandbox_env.from_string(tmpl).render(**safe_namespace) if 'INVENTORY_UPDATE_ID' not in env: # awx-manage inventory_update does not support extra_vars via -e extra_vars = {} for var_name, tmpl in self.injectors.get('extra_vars', {}).items(): - extra_vars[var_name] = Template(tmpl).render(**namespace) + extra_vars[var_name] = sandbox_env.from_string(tmpl).render(**namespace) def build_extra_vars_file(vars, private_dir): handle, path = tempfile.mkstemp(dir = private_dir) From 1cf2f009edf7d06e47250a364d1ae9302247c657 Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Tue, 7 Jul 2020 10:59:14 -0400 Subject: [PATCH 311/494] prevent unsafe jinja from being saved in the first place for cred types see: https://github.com/ansible/tower-security/issues/21 --- awx/main/fields.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/awx/main/fields.py b/awx/main/fields.py index a1900294fa..0122b0ab80 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -7,8 +7,8 @@ import json import re import urllib.parse -from jinja2 import Environment, StrictUndefined -from jinja2.exceptions import UndefinedError, TemplateSyntaxError +from jinja2 import sandbox, StrictUndefined +from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError # Django from django.contrib.postgres.fields import JSONField as upstream_JSONBField @@ -940,7 +940,7 @@ class CredentialTypeInjectorField(JSONSchemaField): self.validate_env_var_allowed(key) for key, tmpl in injector.items(): try: - Environment( + sandbox.ImmutableSandboxedEnvironment( undefined=StrictUndefined ).from_string(tmpl).render(valid_namespace) except UndefinedError as e: @@ -950,6 +950,10 @@ class CredentialTypeInjectorField(JSONSchemaField): code='invalid', params={'value': value}, ) + except SecurityError as e: + raise django_exceptions.ValidationError( + _('Encountered unsafe code execution: {}').format(e) + ) except TemplateSyntaxError as e: raise django_exceptions.ValidationError( _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format( From 7322e134360e0f7b803f39ece1a727c426f427db Mon Sep 17 00:00:00 2001 From: Ryan Petrello Date: Wed, 8 Jul 2020 16:53:05 -0400 Subject: [PATCH 312/494] add tests for clarified label permissions --- awx/main/tests/functional/test_rbac_label.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/awx/main/tests/functional/test_rbac_label.py b/awx/main/tests/functional/test_rbac_label.py index 955894c06f..ed819df9f0 100644 --- 
a/awx/main/tests/functional/test_rbac_label.py +++ b/awx/main/tests/functional/test_rbac_label.py @@ -20,8 +20,19 @@ def test_label_get_queryset_su(label, user): @pytest.mark.django_db -def test_label_access(label, user): +def test_label_read_access(label, user): access = LabelAccess(user('user', False)) + assert not access.can_read(label) + label.organization.member_role.members.add(user('user', False)) + assert access.can_read(label) + + +@pytest.mark.django_db +def test_label_jt_read_access(label, user, job_template): + access = LabelAccess(user('user', False)) + assert not access.can_read(label) + job_template.read_role.members.add(user('user', False)) + job_template.labels.add(label) assert access.can_read(label) From 84bea3d34834e478471c7f55bb6402718c439fa9 Mon Sep 17 00:00:00 2001 From: Keith Grant Date: Wed, 8 Jul 2020 15:20:42 -0700 Subject: [PATCH 313/494] websockets: show live inventory source sync status --- .../Inventory/InventoryList/InventoryList.jsx | 11 +- .../InventoryList/InventoryListItem.jsx | 4 +- .../InventoryList/useWsInventories.js | 108 ++++++++++++------ 3 files changed, 86 insertions(+), 37 deletions(-) diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx index e0aa888f97..65d7bed75a 100644 --- a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx +++ b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryList.jsx @@ -57,7 +57,16 @@ function InventoryList({ i18n }) { fetchInventories(); }, [fetchInventories]); - const inventories = useWsInventories(results); + const fetchInventoriesById = useCallback( + async ids => { + const params = parseQueryString(QS_CONFIG, location.search); + params.id__in = ids.join(','); + const { data } = await InventoriesAPI.read(params); + return data.results; + }, + [location.search] // eslint-disable-line react-hooks/exhaustive-deps + ); + const inventories = useWsInventories(results, fetchInventoriesById); const isAllSelected = selected.length === inventories.length && selected.length > 0; diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx index eb091495c7..9070656bba 100644 --- a/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx +++ b/awx/ui_next/src/screens/Inventory/InventoryList/InventoryListItem.jsx @@ -54,7 +54,9 @@ function InventoryListItem({ const labelId = `check-action-${inventory.id}`; let syncStatus = 'disabled'; - if (inventory.has_inventory_sources) { + if (inventory.isSourceSyncRunning) { + syncStatus = 'syncing'; + } else if (inventory.has_inventory_sources) { syncStatus = inventory.inventory_sources_with_failures > 0 ? 
'error' : 'success'; } diff --git a/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js b/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js index 29d438d3bf..d9886c29c4 100644 --- a/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js +++ b/awx/ui_next/src/screens/Inventory/InventoryList/useWsInventories.js @@ -1,50 +1,88 @@ import { useState, useEffect, useRef } from 'react'; +import useThrottle from '../../../util/useThrottle'; -export default function useWsProjects(initialInventories) { +export default function useWsProjects( + initialInventories, + fetchInventoriesById +) { const [inventories, setInventories] = useState(initialInventories); const [lastMessage, setLastMessage] = useState(null); + const [inventoriesToFetch, setInventoriesToFetch] = useState([]); + const throttledInventoriesToFetch = useThrottle(inventoriesToFetch, 5000); const ws = useRef(null); useEffect(() => { setInventories(initialInventories); }, [initialInventories]); - // const messageExample = { - // unified_job_id: 533, - // status: 'pending', - // type: 'inventory_update', - // inventory_source_id: 53, - // inventory_id: 5, - // group_name: 'jobs', - // unified_job_template_id: 53, - // }; - useEffect(() => { - if (!lastMessage?.unified_job_id || lastMessage.type !== 'project_update') { - return; - } - const index = inventories.findIndex(p => p.id === lastMessage.project_id); - if (index === -1) { - return; + const enqueueId = id => { + if (!inventoriesToFetch.includes(id)) { + setInventoriesToFetch(ids => ids.concat(id)); } + }; + useEffect( + function fetchUpdatedInventories() { + (async () => { + if (!throttledInventoriesToFetch.length) { + return; + } + setInventoriesToFetch([]); + const newInventories = await fetchInventoriesById( + throttledInventoriesToFetch + ); + let updated = inventories; + newInventories.forEach(inventory => { + const index = inventories.findIndex(i => i.id === inventory.id); + if (index === -1) { + return; + } + updated = [ + ...updated.slice(0, index), + inventory, + ...updated.slice(index + 1), + ]; + }); + setInventories(updated); + })(); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [throttledInventoriesToFetch, fetchInventoriesById] + ); - const inventory = inventories[index]; - const updatedProject = { - ...inventory, - summary_fields: { - ...inventory.summary_fields, - // last_job: { - // id: lastMessage.unified_job_id, - // status: lastMessage.status, - // finished: lastMessage.finished, - // }, - }, - }; - setInventories([ - ...inventories.slice(0, index), - updatedProject, - ...inventories.slice(index + 1), - ]); - }, [lastMessage]); // eslint-disable-line react-hooks/exhaustive-deps + useEffect( + function processWsMessage() { + if ( + !lastMessage?.inventory_id || + lastMessage.type !== 'inventory_update' + ) { + return; + } + const index = inventories.findIndex( + p => p.id === lastMessage.inventory_id + ); + if (index === -1) { + return; + } + + if (!['pending', 'waiting', 'running'].includes(lastMessage.status)) { + enqueueId(lastMessage.inventory_id); + return; + } + + const inventory = inventories[index]; + const updatedInventory = { + ...inventory, + isSourceSyncRunning: true, + }; + setInventories([ + ...inventories.slice(0, index), + updatedInventory, + ...inventories.slice(index + 1), + ]); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps, + [lastMessage] + ); useEffect(() => { ws.current = new WebSocket(`wss://${window.location.host}/websocket/`); From 
ddbe20d41beabf89e49c8ea1a43b29e45ca9de01 Mon Sep 17 00:00:00 2001 From: Jake McDermott Date: Thu, 9 Jul 2020 11:13:37 -0400 Subject: [PATCH 314/494] Include instance_id in host edit request --- .../inventories/related/hosts/edit/host-edit.controller.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/hosts/edit/host-edit.controller.js b/awx/ui/client/src/inventories-hosts/inventories/related/hosts/edit/host-edit.controller.js index 098a6113a2..e54127f176 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/hosts/edit/host-edit.controller.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/hosts/edit/host-edit.controller.js @@ -26,6 +26,9 @@ description: $scope.description, enabled: $scope.host.enabled }; + if (typeof $scope.host.instance_id !== 'undefined') { + host.instance_id = $scope.host.instance_id; + } HostsService.put(host).then(function(){ $state.go('.', null, {reload: true}); }); From 29666bf3b6b4eb0c5bb565d57782d1f5938de299 Mon Sep 17 00:00:00 2001 From: Philip Douglass Date: Thu, 9 Jul 2020 18:24:16 +0200 Subject: [PATCH 315/494] Remove white-space: pre-wrap from loginModalNotice Signed-off-by: Philip Douglass --- awx/ui/client/src/login/loginModal/loginModalNotice.block.less | 1 - 1 file changed, 1 deletion(-) diff --git a/awx/ui/client/src/login/loginModal/loginModalNotice.block.less b/awx/ui/client/src/login/loginModal/loginModalNotice.block.less index 57cb035e8c..d5e6d4b517 100644 --- a/awx/ui/client/src/login/loginModal/loginModalNotice.block.less +++ b/awx/ui/client/src/login/loginModal/loginModalNotice.block.less @@ -12,7 +12,6 @@ color: @login-notice-text; overflow-y: scroll; overflow-x: visible; - white-space: pre-wrap; } .LoginModalNotice-title { From 202a68aca03e72cdb476d64b7a66a16abbbe83c7 Mon Sep 17 00:00:00 2001 From: Philip Douglass Date: Thu, 9 Jul 2020 18:39:43 +0200 Subject: [PATCH 316/494] Add customLoginInfoIsHTML test Signed-off-by: Philip Douglass --- awx/ui/client/src/login/loginModal/loginModal.controller.js | 1 + 1 file changed, 1 insertion(+) diff --git a/awx/ui/client/src/login/loginModal/loginModal.controller.js b/awx/ui/client/src/login/loginModal/loginModal.controller.js index 6ce18559f6..657cb55553 100644 --- a/awx/ui/client/src/login/loginModal/loginModal.controller.js +++ b/awx/ui/client/src/login/loginModal/loginModal.controller.js @@ -88,6 +88,7 @@ export default ['$log', '$cookies', '$rootScope', 'ProcessErrors', } scope.customLoginInfo = $AnsibleConfig.custom_login_info; scope.customLoginInfoPresent = (scope.customLoginInfo) ? true : false; + scope.customLoginInfoIsHTML = /<\/?[a-z][\s\S]*>/i.test(scope.customLoginInfo); }); if (scope.removeAuthorizationGetLicense) { From 4c98a1cb20fed411dc3a9ea9774000cc979310fe Mon Sep 17 00:00:00 2001 From: Philip Douglass Date: Thu, 9 Jul 2020 18:49:16 +0200 Subject: [PATCH 317/494] Set white-space style based on customLoginInfoIsHTML Signed-off-by: Philip Douglass --- awx/ui/client/src/login/loginModal/loginModal.partial.html | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/awx/ui/client/src/login/loginModal/loginModal.partial.html b/awx/ui/client/src/login/loginModal/loginModal.partial.html index e3133f84c9..d9fa2c0219 100644 --- a/awx/ui/client/src/login/loginModal/loginModal.partial.html +++ b/awx/ui/client/src/login/loginModal/loginModal.partial.html @@ -98,7 +98,12 @@ -
NOTICE
{{ customLoginInfo | sanitize }}
+
+
NOTICE
+ + +
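The three patches from Philip Douglass work as a unit: the LESS change stops forcing pre-wrap on every notice, the controller change sets customLoginInfoIsHTML by sniffing the configured value with a regex, and the template change is meant to pick its white-space handling from that flag. As a rough sketch only, assuming an ng-style toggle and keeping the existing sanitize filter (the actual markup is whatever the hunk above adds), the notice block could be wired up like this:

    <div class="LoginModalNotice-notice"
         ng-style="{'white-space': customLoginInfoIsHTML ? 'normal' : 'pre-wrap'}">
        <div class="LoginModalNotice-title">NOTICE</div>
        {{ customLoginInfo | sanitize }}
    </div>

Plain-text notices keep their line breaks via pre-wrap, while HTML notices fall back to normal whitespace collapsing and rely on their own markup for layout.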