Mirror of https://github.com/ansible/awx.git, synced 2026-02-07 12:34:43 -03:30

Compare commits (166 commits)
| SHA1 |
|---|
| b548ad21a9 |
| 3d0391173b |
| ce560bcd5f |
| 8c3e289170 |
| 9364c8e562 |
| 5831949ebf |
| 7fe98a670f |
| 6f68f3cba6 |
| 4dc956c76f |
| 11a56117eb |
| 10eed6286a |
| d36befd9ce |
| 0c4ddc7f6f |
| 3ef9679de3 |
| d36441489a |
| d26c12dd7c |
| 7fa7ed3658 |
| 2c68e7a3d2 |
| 0c9b1c3c79 |
| e10b0e513e |
| 68c66edada |
| 6eb17e7af7 |
| 9a24da3098 |
| 8ed0543b8b |
| 73a84444d1 |
| 451767c179 |
| 8366386126 |
| 997686a2ea |
| 2ba68ef5d0 |
| 2041665880 |
| 1e6ca01686 |
| e15a76e7aa |
| 64db44acef |
| 9972389a8d |
| e0b1274eee |
| df649e2c56 |
| a778017efb |
| 6a9305818e |
| 2669904c72 |
| 35529b5eeb |
| d55ed8713c |
| 7973f28bed |
| 8189964cce |
| ee4c901dc7 |
| 78220cad82 |
| 40279bc6c0 |
| f6fb46d99e |
| 954b32941e |
| 48b016802c |
| 35aa5dd79f |
| 237402068c |
| 31dda6e9d6 |
| 1c9b4af61d |
| eba4a3f1c2 |
| 0ae9fe3624 |
| 1b662fcca5 |
| cfdba959dd |
| 78660ad0a2 |
| 70697869d7 |
| 10e55108ef |
| d4223b8877 |
| 9537d148d7 |
| a133a14b70 |
| 4ca9e9577b |
| 44986fad36 |
| eb2fca86b6 |
| 458a1fc035 |
| 6e87b29e92 |
| be1d0c525c |
| 0787cb4fc2 |
| 19063a2d90 |
| e8e2f820d2 |
| aaad634483 |
| dfa4127bae |
| f3725c714a |
| cef3ed01ac |
| fc1a3f46f9 |
| bfa5feb51b |
| 4c0813bd69 |
| 9b0b0f2a5f |
| e87c121f8f |
| 65dfc424bc |
| dfea9cc526 |
| 0d97a0364a |
| 1da57a4a12 |
| b73078e9db |
| b17f22cd38 |
| 7b225057ce |
| 8242078c06 |
| a86740c3c9 |
| cbde56549d |
| 385a94866c |
| 21972c91dd |
| 36d3f9afdb |
| df2d303ab0 |
| 05eba350b7 |
| 1e12e12578 |
| bbdab82433 |
| f7be6b6423 |
| ba358eaa4f |
| 162e09972f |
| 2cfccdbe16 |
| 434fa7b7be |
| 2f8bdf1eab |
| e1705738a1 |
| 4cfb8fe482 |
| d52d2af4b4 |
| 97fd3832d4 |
| 3cedd0e0bd |
| 507b1898ce |
| e3fe9010b7 |
| 2c350b8b90 |
| d74e258079 |
| b03cabd314 |
| 6a63af83c0 |
| 452744b67e |
| 703a68d4fe |
| 557893e4b0 |
| d7051fb6ce |
| 867c50da19 |
| e8d76ec272 |
| c102c61532 |
| adb2b0da89 |
| 3610008699 |
| 3b44838dde |
| 0205d7deab |
| dd47829bdb |
| e7e72d13a9 |
| 4bbdf1ec8a |
| 4596df449e |
| ecbb636ba1 |
| e3aed9dad4 |
| 213983a322 |
| 2977084787 |
| b6362a63cc |
| 7517ba820b |
| 29d60844a8 |
| 41b0607d7e |
| 13f7166a30 |
| 0cc9b84ead |
| 68ee4311bf |
| 6e6c3f676e |
| c67f50831b |
| 50ef234bd6 |
| 2bef5ce09b |
| a49c4796f4 |
| 9eab9586e5 |
| cd35787a86 |
| cbe84ff4f3 |
| 410f38eccf |
| b885fc2d86 |
| 4c93f5794a |
| 456bb75dcb |
| 02fd8b0d20 |
| fbe6c80f86 |
| 3d5f302d10 |
| 856a2c1734 |
| 4277b73438 |
| 2888f9f8d0 |
| 68221cdcbe |
| f50501cc2a |
| c84fac65e0 |
| d64c457b3d |
| 1bd5a880dc |
| 47d5a89f40 |
| 6060e7e29f |
20 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,20 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/awx/ui"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 5
+    allow:
+      - dependency-type: "production"
+    reviewers:
+      - "AlexSCorey"
+      - "keithjgrant"
+      - "kialam"
+      - "mabashian"
+      - "marshmalien"
+      - "nixocio"
+    labels:
+      - "component:ui"
+      - "dependencies"
+    target-branch: "devel"
10 .github/pr_labeler.yml vendored
@@ -1,14 +1,14 @@
"component:api":
-  - any: ['awx/**/*', '!awx/ui/*']
+  - any: ["awx/**/*", "!awx/ui/**"]

"component:ui":
-  - any: ['awx/ui/**/*']
+  - any: ["awx/ui/**/*"]

"component:docs":
-  - any: ['docs/**/*']
+  - any: ["docs/**/*"]

"component:cli":
-  - any: ['awxkit/**/*']
+  - any: ["awxkit/**/*"]

"component:collection":
-  - any: ['awx_collection/**/*']
+  - any: ["awx_collection/**/*"]
18 .github/triage_replies.md vendored
@@ -12,6 +12,9 @@
## Create a Pull Request
- Hello, we think your idea is good, please consider contributing a PR for this, following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md

## Give us more info
- Hello, we'd love to help but we need a little more information about the problem you're having. Screenshots, log outputs, or any reproducers would be very helpful.

+## Receptor
+- You can find the receptor docs here: https://receptor.readthedocs.io/en/latest/
+- Hello, your issue seems related to receptor, could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
@@ -29,3 +32,18 @@
## Code of Conduct
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html

+## AWX Release
+- Hi all,\
+\
+We're happy to announce that the next release of AWX, version 21.0.0 is now available!\
+In addition AWX Operator version 0.21.0 has also been released!\
+\
+Please see the releases pages for more details:\
+AWX: https://github.com/ansible/awx/releases/tag/21.0.0\
+Operator: https://github.com/ansible/awx-operator/releases/tag/0.20.1\
+\
+The AWX team.
+
+## Try latest version
+- Hello, this issue pertains to an older version of AWX. Try upgrading to the latest version and see if that resolves your issue.
2 .gitignore vendored
@@ -38,7 +38,7 @@ awx/ui/build
awx/ui/.env.local
awx/ui/instrumented
rsyslog.pid
-tools/prometheus/data
+tools/prometheus
tools/docker-compose/ansible/awx_dump.sql
tools/docker-compose/Dockerfile
tools/docker-compose/_build
12 Makefile
@@ -15,6 +15,8 @@ MAIN_NODE_TYPE ?= hybrid
KEYCLOAK ?= false
# If set to true docker-compose will also start an ldap instance
LDAP ?= false
+# If set to true docker-compose will also start a splunk instance
+SPLUNK ?= false

VENV_BASE ?= /var/lib/awx/venv

@@ -466,7 +468,8 @@ docker-compose-sources: .git/hooks/pre-commit
	    -e execution_node_count=$(EXECUTION_NODE_COUNT) \
	    -e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
	    -e enable_keycloak=$(KEYCLOAK) \
-	    -e enable_ldap=$(LDAP)
+	    -e enable_ldap=$(LDAP) \
+	    -e enable_splunk=$(SPLUNK)


docker-compose: awx/projects docker-compose-sources
@@ -526,7 +529,12 @@ docker-compose-cluster-elk: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

prometheus:
-	docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml
+	docker volume create prometheus
+	docker run -d --rm --net=_sources_default --link=awx_1:awx1 --volume prometheus-storage:/prometheus --volume `pwd`/tools/prometheus:/etc/prometheus --name prometheus -p 9090:9090 prom/prometheus

+grafana:
+	docker volume create grafana
+	docker run -d --rm --net=_sources_default --volume grafana-storage:/var/lib/grafana --volume `pwd`/tools/grafana:/etc/grafana/provisioning --name grafana -p 3001:3000 grafana/grafana-enterprise

docker-compose-container-group:
	MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -78,9 +78,10 @@ def oauth2_getattribute(self, attr):
    # Custom method to override
    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
    from django.conf import settings
    from oauth2_provider.settings import DEFAULTS

    val = None
+   if 'migrate' not in sys.argv:
        if (isinstance(attr, str)) and (attr in DEFAULTS) and (not attr.startswith('_')):
            # certain Django OAuth Toolkit migrations actually reference
            # setting lookups for references to model classes (e.g.,
            # oauth2_settings.REFRESH_TOKEN_MODEL)
@@ -1607,7 +1607,6 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):

class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):

-   host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
    playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))

    class Meta:
@@ -1622,14 +1621,6 @@ class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):

        return data

-   def get_host_status_counts(self, obj):
-       try:
-           counts = obj.project_update_events.only('event_data').get(event='playbook_on_stats').get_host_status_counts()
-       except ProjectUpdateEvent.DoesNotExist:
-           counts = {}
-
-       return counts
-

class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSerializer):
    class Meta:
@@ -2673,6 +2664,13 @@ class CredentialSerializer(BaseSerializer):

        return credential_type

+   def validate_inputs(self, inputs):
+       if self.instance and self.instance.credential_type.kind == "vault":
+           if 'vault_id' in inputs and inputs['vault_id'] != self.instance.inputs['vault_id']:
+               raise ValidationError(_('Vault IDs cannot be changed once they have been created.'))
+
+       return inputs
+

class CredentialSerializerCreate(CredentialSerializer):

@@ -3107,7 +3105,6 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):

class JobDetailSerializer(JobSerializer):

-   host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
    playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
    custom_virtualenv = serializers.ReadOnlyField()

@@ -3123,14 +3120,6 @@ class JobDetailSerializer(JobSerializer):

        return data

-   def get_host_status_counts(self, obj):
-       try:
-           counts = obj.get_event_queryset().only('event_data').get(event='playbook_on_stats').get_host_status_counts()
-       except JobEvent.DoesNotExist:
-           counts = {}
-
-       return counts
-

class JobCancelSerializer(BaseSerializer):

@@ -3319,21 +3308,10 @@ class AdHocCommandSerializer(UnifiedJobSerializer):


class AdHocCommandDetailSerializer(AdHocCommandSerializer):

-   host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
-
    class Meta:
        model = AdHocCommand
        fields = ('*', 'host_status_counts')

-   def get_host_status_counts(self, obj):
-       try:
-           counts = obj.ad_hoc_command_events.only('event_data').get(event='playbook_on_stats').get_host_status_counts()
-       except AdHocCommandEvent.DoesNotExist:
-           counts = {}
-
-       return counts
-

class AdHocCommandCancelSerializer(AdHocCommandSerializer):
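The `validate_inputs` guard above uses Django REST Framework's per-field validation hook. A minimal standalone sketch of the same pattern follows; the serializer class and the dict-shaped `instance` are hypothetical stand-ins, not AWX's real `CredentialSerializer`:

```python
from rest_framework import serializers


class VaultCredentialSketch(serializers.Serializer):
    # Hypothetical stand-in, just to show the validate_<field_name> hook.
    inputs = serializers.DictField()

    def validate_inputs(self, inputs):
        # DRF calls validate_<field_name>(value) after field deserialization;
        # raising here attaches the message to the `inputs` field.
        if self.instance is not None:
            old_vault_id = self.instance.get('inputs', {}).get('vault_id')
            if 'vault_id' in inputs and inputs['vault_id'] != old_vault_id:
                raise serializers.ValidationError('Vault IDs cannot be changed once they have been created.')
        return inputs


serializer = VaultCredentialSketch(
    instance={'inputs': {'vault_id': 'prod'}},
    data={'inputs': {'vault_id': 'dev'}},
)
print(serializer.is_valid(), serializer.errors)  # False, with the vault_id error
```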
@@ -4645,61 +4623,60 @@ class SchedulePreviewSerializer(BaseSerializer):

    # We reject rrules if:
    # - DTSTART is not included
-   # - INTERVAL is not included
-   # - SECONDLY is used
    # - TZID is used
-   # - BYDAY prefixed with a number (MO is good but not 20MO)
    # - BYYEARDAY
    # - BYWEEKNO
-   # - Multiple DTSTART or RRULE elements
-   # - Can't contain both COUNT and UNTIL
-   # - COUNT > 999
+   # - Multiple DTSTART
+   # - At least one of RRULE is not included
+   # - EXDATE or RDATE is included
+   # For any rule in the ruleset:
+   # - INTERVAL is not included
+   # - SECONDLY is used
+   # - BYDAY prefixed with a number (MO is good but not 20MO)
+   # - Can't contain both COUNT and UNTIL
+   # - COUNT > 999
    def validate_rrule(self, value):
        rrule_value = value
        multi_by_month_day = r".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
        multi_by_month = r".*?BYMONTH[\:\=][0-9]+,[0-9]+"
        by_day_with_numeric_prefix = r".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
-       match_count = re.match(r".*?(COUNT\=[0-9]+)", rrule_value)
        match_multiple_dtstart = re.findall(r".*?(DTSTART(;[^:]+)?\:[0-9]+T[0-9]+Z?)", rrule_value)
        match_native_dtstart = re.findall(r".*?(DTSTART:[0-9]+T[0-9]+) ", rrule_value)
-       match_multiple_rrule = re.findall(r".*?(RRULE\:)", rrule_value)
+       match_multiple_rrule = re.findall(r".*?(RULE\:[^\s]*)", rrule_value)
+       errors = []
        if not len(match_multiple_dtstart):
-           raise serializers.ValidationError(_('Valid DTSTART required in rrule. Value should start with: DTSTART:YYYYMMDDTHHMMSSZ'))
+           errors.append(_('Valid DTSTART required in rrule. Value should start with: DTSTART:YYYYMMDDTHHMMSSZ'))
        if len(match_native_dtstart):
-           raise serializers.ValidationError(_('DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ.'))
+           errors.append(_('DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ.'))
        if len(match_multiple_dtstart) > 1:
-           raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
-       if not len(match_multiple_rrule):
-           raise serializers.ValidationError(_('RRULE required in rrule.'))
-       if len(match_multiple_rrule) > 1:
-           raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
-       if 'interval' not in rrule_value.lower():
-           raise serializers.ValidationError(_('INTERVAL required in rrule.'))
-       if 'secondly' in rrule_value.lower():
-           raise serializers.ValidationError(_('SECONDLY is not supported.'))
-       if re.match(multi_by_month_day, rrule_value):
-           raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
-       if re.match(multi_by_month, rrule_value):
-           raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
-       if re.match(by_day_with_numeric_prefix, rrule_value):
-           raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
-       if 'byyearday' in rrule_value.lower():
-           raise serializers.ValidationError(_("BYYEARDAY not supported."))
-       if 'byweekno' in rrule_value.lower():
-           raise serializers.ValidationError(_("BYWEEKNO not supported."))
-       if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
-           raise serializers.ValidationError(_("RRULE may not contain both COUNT and UNTIL"))
-       if match_count:
-           count_val = match_count.groups()[0].strip().split("=")
-           if int(count_val[1]) > 999:
-               raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
+           errors.append(_('Multiple DTSTART is not supported.'))
+       if "rrule:" not in rrule_value.lower():
+           errors.append(_('One or more rule required in rrule.'))
+       if "exdate:" in rrule_value.lower():
+           raise serializers.ValidationError(_('EXDATE not allowed in rrule.'))
+       if "rdate:" in rrule_value.lower():
+           raise serializers.ValidationError(_('RDATE not allowed in rrule.'))
+       for a_rule in match_multiple_rrule:
+           if 'interval' not in a_rule.lower():
+               errors.append("{0}: {1}".format(_('INTERVAL required in rrule'), a_rule))
+           elif 'secondly' in a_rule.lower():
+               errors.append("{0}: {1}".format(_('SECONDLY is not supported'), a_rule))
+           if re.match(by_day_with_numeric_prefix, a_rule):
+               errors.append("{0}: {1}".format(_("BYDAY with numeric prefix not supported"), a_rule))
+           if 'COUNT' in a_rule and 'UNTIL' in a_rule:
+               errors.append("{0}: {1}".format(_("RRULE may not contain both COUNT and UNTIL"), a_rule))
+           match_count = re.match(r".*?(COUNT\=[0-9]+)", a_rule)
+           if match_count:
+               count_val = match_count.groups()[0].strip().split("=")
+               if int(count_val[1]) > 999:
+                   errors.append("{0}: {1}".format(_("COUNT > 999 is unsupported"), a_rule))

        try:
            Schedule.rrulestr(rrule_value)
        except Exception as e:
            import traceback

            logger.error(traceback.format_exc())
-           raise serializers.ValidationError(_("rrule parsing failed validation: {}").format(e))
+           errors.append(_("rrule parsing failed validation: {}").format(e))

+       if errors:
+           raise serializers.ValidationError(errors)
+
        return value
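For context, the loosened validation admits composite rrule strings that dateutil can parse as a ruleset. A minimal sketch of the parsing this ultimately relies on, using only `dateutil.rrule.rrulestr` (the dates and rule values here are arbitrary examples, not AWX data):

```python
from dateutil import rrule

# One DTSTART plus two rules: a daily RRULE and a weekly EXRULE that
# excludes Sundays from the daily occurrences.
composite = "\n".join(
    [
        "DTSTART;TZID=America/New_York:20300101T090000",
        "RRULE:FREQ=DAILY;INTERVAL=1;COUNT=10",
        "EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU",
    ]
)

# forceset=True always yields an rruleset, even for a single rule.
ruleset = rrule.rrulestr(composite, forceset=True)
for occurrence in ruleset:
    print(occurrence)
```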
@@ -537,6 +537,7 @@ class ScheduleList(ListCreateAPIView):

    name = _("Schedules")
    model = models.Schedule
    serializer_class = serializers.ScheduleSerializer
+   ordering = ('id',)


class ScheduleDetail(RetrieveUpdateDestroyAPIView):
@@ -1,7 +1,6 @@
# Python
import contextlib
import logging
-import sys
import threading
import time
import os
@@ -31,7 +30,7 @@ from awx.conf.models import Setting

logger = logging.getLogger('awx.conf.settings')

-SETTING_MEMORY_TTL = 5 if 'callback_receiver' in ' '.join(sys.argv) else 0
+SETTING_MEMORY_TTL = 5

# Store a special value to indicate when a setting is not set in the database.
SETTING_CACHE_NOTSET = '___notset___'
@@ -234,6 +233,8 @@ class SettingsWrapper(UserSettingsHolder):
        self.__dict__['_awx_conf_init_readonly'] = False
        self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
        self.__dict__['registry'] = registry
+       self.__dict__['_awx_conf_memoizedcache'] = cachetools.TTLCache(maxsize=2048, ttl=SETTING_MEMORY_TTL)
+       self.__dict__['_awx_conf_memoizedcache_lock'] = threading.Lock()

        # record the current pid so we compare it post-fork for
        # processes like the dispatcher and callback receiver
@@ -396,12 +397,20 @@ class SettingsWrapper(UserSettingsHolder):
    def SETTINGS_MODULE(self):
        return self._get_default('SETTINGS_MODULE')

-   @cachetools.cached(cache=cachetools.TTLCache(maxsize=2048, ttl=SETTING_MEMORY_TTL))
+   @cachetools.cachedmethod(
+       cache=lambda self: self.__dict__['_awx_conf_memoizedcache'],
+       key=lambda *args, **kwargs: SettingsWrapper.hashkey(*args, **kwargs),
+       lock=lambda self: self.__dict__['_awx_conf_memoizedcache_lock'],
+   )
    def _get_local_with_cache(self, name):
        """Get value while accepting the in-memory cache if key is available"""
        with _ctit_db_wrapper(trans_safe=True):
            return self._get_local(name)

    def __getattr__(self, name):
        value = empty
        if name in self.all_supported_settings:
-           with _ctit_db_wrapper(trans_safe=True):
-               value = self._get_local(name)
+           value = self._get_local_with_cache(name)
        if value is not empty:
            return value
        return self._get_default(name)
@@ -475,6 +484,23 @@ class SettingsWrapper(UserSettingsHolder):
        set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
        return set_locally or set_on_default

+   @classmethod
+   def hashkey(cls, *args, **kwargs):
+       """
+       Usage of @cachetools.cached has changed to @cachetools.cachedmethod
+       The previous cachetools decorator called the hash function and passed in (self, key).
+       The new cachetools decorator calls the hash function with just (key).
+       Ideally, we would continue to pass self, however, the cachetools decorator interface
+       does not allow us to.
+
+       This hashkey function is to maintain that the key generated looks like
+       ('<SettingsWrapper>', key). The thought is that maybe it is important to namespace
+       our cache to the SettingsWrapper scope in case some other usage of this cache exists.
+       I can not think of how any other system could and would use our private cache, but
+       for safety's sake we are ensuring the key schema does not change.
+       """
+       return cachetools.keys.hashkey(f"<{cls.__name__}>", *args, **kwargs)
+

    def __getattr_without_cache__(self, name):
        # Django 1.10 added an optimization to settings lookup:
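A compact sketch of the `@cachetools.cachedmethod` pattern adopted above, with a per-instance TTLCache, a lock, and a namespaced hash key. The class, TTL, and cache size below are illustrative, not AWX's real `SettingsWrapper`:

```python
import threading

import cachetools
import cachetools.keys


class SettingsLike:
    def __init__(self):
        # per-instance cache avoids the class-level cache a plain
        # @cachetools.cached decorator would share across instances
        self._cache = cachetools.TTLCache(maxsize=128, ttl=5)
        self._lock = threading.Lock()

    @classmethod
    def hashkey(cls, *args, **kwargs):
        # namespace the key, mirroring SettingsWrapper.hashkey
        return cachetools.keys.hashkey(f"<{cls.__name__}>", *args, **kwargs)

    @cachetools.cachedmethod(
        cache=lambda self: self._cache,
        key=lambda *args, **kwargs: SettingsLike.hashkey(*args, **kwargs),
        lock=lambda self: self._lock,
    )
    def get(self, name):
        print(f"computing {name}")  # only printed on a cache miss
        return name.upper()


s = SettingsLike()
s.get("awx_var")  # miss: computes and stores
s.get("awx_var")  # hit within the 5s TTL: served from the cache
```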
@@ -28,6 +28,9 @@ def handle_setting_change(key, for_delete=False):
    cache_keys = {Setting.get_cache_key(k) for k in setting_keys}
    cache.delete_many(cache_keys)

+   # if we have changed a setting, we want to avoid mucking with the in-memory cache entirely
+   settings._awx_conf_memoizedcache.clear()
+
    # Send setting_changed signal with new value for each setting.
    for setting_key in setting_keys:
        setting_changed.send(sender=Setting, setting=setting_key, value=getattr(settings, setting_key, None), enter=not bool(for_delete))
@@ -8,6 +8,8 @@ import codecs
from uuid import uuid4
import time

+from unittest import mock
+
from django.conf import LazySettings
from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
@@ -299,3 +301,33 @@ def test_readonly_sensitive_cache_data_is_encrypted(settings):
    cache.set('AWX_ENCRYPTED', 'SECRET!')
    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'


+@pytest.mark.defined_in_file(AWX_VAR='DEFAULT')
+def test_in_memory_cache_only_for_registered_settings(settings):
+    "Test that we only make use of the in-memory TTL cache for registered settings"
+    settings._awx_conf_memoizedcache.clear()
+    settings.MIDDLEWARE
+    assert len(settings._awx_conf_memoizedcache) == 0  # does not cache MIDDLEWARE
+    settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
+    settings._wrapped.__dict__['all_supported_settings'] = ['AWX_VAR']  # because it is cached_property
+    settings._awx_conf_memoizedcache.clear()
+    assert settings.AWX_VAR == 'DEFAULT'
+    assert len(settings._awx_conf_memoizedcache) == 1  # caches registered settings
+
+
+@pytest.mark.defined_in_file(AWX_VAR='DEFAULT')
+def test_in_memory_cache_works(settings):
+    settings._awx_conf_memoizedcache.clear()
+    settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
+    settings._wrapped.__dict__['all_supported_settings'] = ['AWX_VAR']
+
+    settings._awx_conf_memoizedcache.clear()
+
+    with mock.patch('awx.conf.settings.SettingsWrapper._get_local', return_value='DEFAULT') as mock_get:
+        assert settings.AWX_VAR == 'DEFAULT'
+        mock_get.assert_called_once_with('AWX_VAR')
+
+    with mock.patch.object(settings, '_get_local') as mock_get:
+        assert settings.AWX_VAR == 'DEFAULT'
+        mock_get.assert_not_called()
@@ -6,8 +6,9 @@ import time
import traceback

from django.conf import settings
+from django.utils.functional import cached_property
from django.utils.timezone import now as tz_now
-from django.db import DatabaseError, OperationalError, connection as django_connection
+from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
from django.db.utils import InterfaceError, InternalError
from django_guid import set_guid

@@ -16,8 +17,8 @@ import psutil
import redis

from awx.main.consumers import emit_channel_notification
-from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent, UnifiedJob, Job
-from awx.main.tasks.system import handle_success_and_failure_notifications
+from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent, UnifiedJob
+from awx.main.constants import ACTIVE_STATES
from awx.main.models.events import emit_event_detail
from awx.main.utils.profiling import AWXProfiler
import awx.main.analytics.subsystem_metrics as s_metrics
@@ -26,6 +27,32 @@ from .base import BaseWorker

logger = logging.getLogger('awx.main.commands.run_callback_receiver')


+def job_stats_wrapup(job_identifier, event=None):
+    """Fill in the unified job host_status_counts, fire off notifications if needed"""
+    try:
+        # empty dict (versus default of None) can still indicate that events have been processed
+        # for job types like system jobs, and jobs with no hosts matched
+        host_status_counts = {}
+        if event:
+            host_status_counts = event.get_host_status_counts()
+
+        # Update host_status_counts while holding the row lock
+        with transaction.atomic():
+            uj = UnifiedJob.objects.select_for_update().get(pk=job_identifier)
+            uj.host_status_counts = host_status_counts
+            uj.save(update_fields=['host_status_counts'])
+
+        uj.log_lifecycle("stats_wrapup_finished")
+
+        # If the status was a finished state before this update was made, send notifications
+        # If not, we will send notifications when the status changes
+        if uj.status not in ACTIVE_STATES:
+            uj.send_notification_templates('succeeded' if uj.status == 'successful' else 'failed')
+
+    except Exception:
+        logger.exception('Worker failed to save stats or emit notifications: Job {}'.format(job_identifier))
+
+
class CallbackBrokerWorker(BaseWorker):
    """
    A worker implementation that deserializes callback event data and persists
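The lock-then-save step inside `job_stats_wrapup` is plain Django; a generic sketch of the same idiom, where the model class and field are parameters rather than AWX's real `UnifiedJob`:

```python
from django.db import transaction


def record_stats(model, job_pk, counts):
    # SELECT ... FOR UPDATE holds a row lock until the transaction commits,
    # so a concurrent writer cannot clobber the counts we are saving.
    with transaction.atomic():
        job = model.objects.select_for_update().get(pk=job_pk)
        job.host_status_counts = counts
        job.save(update_fields=['host_status_counts'])
    return job
```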
@@ -44,7 +71,6 @@ class CallbackBrokerWorker(BaseWorker):

    def __init__(self):
        self.buff = {}
-       self.pid = os.getpid()
        self.redis = redis.Redis.from_url(settings.BROKER_URL)
        self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False)
        self.queue_pop = 0
@@ -53,6 +79,11 @@ class CallbackBrokerWorker(BaseWorker):
        for key in self.redis.keys('awx_callback_receiver_statistics_*'):
            self.redis.delete(key)

+   @cached_property
+   def pid(self):
+       """This needs to be obtained after forking, or else it will give the parent process"""
+       return os.getpid()
+
    def read(self, queue):
        try:
            res = self.redis.blpop(self.queue_name, timeout=1)
@@ -146,6 +177,8 @@ class CallbackBrokerWorker(BaseWorker):
                    if not getattr(e, '_skip_websocket_message', False):
                        metrics_events_broadcast += 1
                        emit_event_detail(e)
+                   if getattr(e, '_notification_trigger_event', False):
+                       job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
            self.buff = {}
            self.last_flush = time.time()
            # only update metrics if we saved events
@@ -165,47 +198,32 @@ class CallbackBrokerWorker(BaseWorker):
        if flush:
            self.last_event = ''
        if not flush:
-           event_map = {
-               'job_id': JobEvent,
-               'ad_hoc_command_id': AdHocCommandEvent,
-               'project_update_id': ProjectUpdateEvent,
-               'inventory_update_id': InventoryUpdateEvent,
-               'system_job_id': SystemJobEvent,
-           }
-
            job_identifier = 'unknown job'
-           for key, cls in event_map.items():
-               if key in body:
-                   job_identifier = body[key]
+           for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
+               if cls.JOB_REFERENCE in body:
+                   job_identifier = body[cls.JOB_REFERENCE]
                    break

            self.last_event = f'\n\t- {cls.__name__} for #{job_identifier} ({body.get("event", "")} {body.get("uuid", "")})'  # noqa

+           notification_trigger_event = bool(body.get('event') == cls.WRAPUP_EVENT)
+
            if body.get('event') == 'EOF':
                try:
                    if 'guid' in body:
                        set_guid(body['guid'])
                    final_counter = body.get('final_counter', 0)
-                   logger.info('Event processing is finished for Job {}, sending notifications'.format(job_identifier))
+                   logger.info('Starting EOF event processing for Job {}'.format(job_identifier))
                    # EOF events are sent when stdout for the running task is
                    # closed. don't actually persist them to the database; we
                    # just use them to report `summary` websocket events as an
                    # approximation for when a job is "done"
                    emit_channel_notification('jobs-summary', dict(group_name='jobs', unified_job_id=job_identifier, final_counter=final_counter))
-                   # Additionally, when we've processed all events, we should
-                   # have all the data we need to send out success/failure
-                   # notification templates
-                   uj = UnifiedJob.objects.get(pk=job_identifier)
-
-                   if isinstance(uj, Job):
-                       # *actual playbooks* send their success/failure
-                       # notifications in response to the playbook_on_stats
-                       # event handling code in main.models.events
-                       pass
-                   elif hasattr(uj, 'send_notification_templates'):
-                       handle_success_and_failure_notifications.apply_async([uj.id])
+                   if notification_trigger_event:
+                       job_stats_wrapup(job_identifier)
                except Exception:
-                   logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier))
+                   logger.exception('Worker failed to perform EOF tasks: Job {}'.format(job_identifier))
                finally:
                    self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1)
                    set_guid('')
@@ -215,9 +233,12 @@ class CallbackBrokerWorker(BaseWorker):

            event = cls.create_from_data(**body)

-           if skip_websocket_message:
+           if skip_websocket_message:  # if this event sends websocket messages, fire them off on flush
                event._skip_websocket_message = True

+           if notification_trigger_event:  # if this is an Ansible stats event, ensure notifications on flush
+               event._notification_trigger_event = True
+
            self.buff.setdefault(cls, []).append(event)

            retries = 0
@@ -103,7 +103,7 @@ class DeleteMeta:

        with connection.cursor() as cursor:
            query = "SELECT inhrelid::regclass::text AS child FROM pg_catalog.pg_inherits"
-           query += f" WHERE inhparent = 'public.{tbl_name}'::regclass"
+           query += f" WHERE inhparent = '{tbl_name}'::regclass"
            query += f" AND TO_TIMESTAMP(LTRIM(inhrelid::regclass::text, '{tbl_name}_'), 'YYYYMMDD_HH24') < '{self.cutoff}'"
            query += " ORDER BY inhrelid::regclass::text"
@@ -26,6 +26,17 @@ logger = logging.getLogger('awx.main.middleware')

perf_logger = logging.getLogger('awx.analytics.performance')


+class SettingsCacheMiddleware(MiddlewareMixin):
+    """
+    Clears the in-memory settings cache at the beginning of a request.
+    We do this so that a script can POST to /api/v2/settings/all/ and then
+    right away GET /api/v2/settings/all/ and see the updated value.
+    """
+
+    def process_request(self, request):
+        settings._awx_conf_memoizedcache.clear()
+
+
class TimingMiddleware(threading.local, MiddlewareMixin):

    dest = '/var/log/tower/profile'
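`SettingsCacheMiddleware` is just `MiddlewareMixin` with a `process_request` hook. A self-contained sketch of the same shape, where the module-level TTLCache stands in for AWX's `_awx_conf_memoizedcache`:

```python
import cachetools
from django.utils.deprecation import MiddlewareMixin

# Stand-in for the settings wrapper's private in-memory cache.
SETTINGS_TTL_CACHE = cachetools.TTLCache(maxsize=2048, ttl=5)


class CacheClearingMiddleware(MiddlewareMixin):
    """Drop memoized settings before each request, so a POST that changes a
    setting is visible to an immediately following GET from the same client."""

    def process_request(self, request):
        SETTINGS_TTL_CACHE.clear()
```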
18 awx/main/migrations/0160_alter_schedule_rrule.py Normal file
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.12 on 2022-04-18 21:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0159_deprecate_inventory_source_UoPU_field'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='schedule',
+            name='rrule',
+            field=models.TextField(help_text='A value representing the schedules iCal recurrence rule.'),
+        ),
+    ]
18 awx/main/migrations/0161_unifiedjob_host_status_counts.py Normal file
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.12 on 2022-04-27 02:16
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0160_alter_schedule_rrule'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='unifiedjob',
+            name='host_status_counts',
+            field=models.JSONField(blank=True, default=None, editable=False, help_text='Playbook stats from the Ansible playbook_on_stats event.', null=True),
+        ),
+    ]
18 awx/main/migrations/0162_alter_unifiedjob_dependent_jobs.py Normal file
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.13 on 2022-05-02 21:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0161_unifiedjob_host_status_counts'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='unifiedjob',
+            name='dependent_jobs',
+            field=models.ManyToManyField(editable=False, related_name='unifiedjob_blocked_jobs', to='main.UnifiedJob'),
+        ),
+    ]
@@ -6,7 +6,7 @@ from collections import defaultdict

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError, connection
+from django.db import models, DatabaseError
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator
from django.utils.timezone import utc, now
@@ -126,6 +126,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
        'host_name',
        'verbosity',
    ]
+   WRAPUP_EVENT = 'playbook_on_stats'

    class Meta:
        abstract = True
@@ -384,14 +385,6 @@ class BasePlaybookEvent(CreatedModifiedModel):
                    job.get_event_queryset().filter(uuid__in=changed).update(changed=True)
                    job.get_event_queryset().filter(uuid__in=failed).update(failed=True)

-                   # send success/failure notifications when we've finished handling the playbook_on_stats event
-                   from awx.main.tasks.system import handle_success_and_failure_notifications  # circular import
-
-                   def _send_notifications():
-                       handle_success_and_failure_notifications.apply_async([job.id])
-
-                   connection.on_commit(_send_notifications)
-
        for field in ('playbook', 'play', 'task', 'role'):
            value = force_str(event_data.get(field, '')).strip()
            if value != getattr(self, field):
@@ -470,6 +463,7 @@ class JobEvent(BasePlaybookEvent):
    """

    VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id', 'workflow_job_id', 'job_created']
+   JOB_REFERENCE = 'job_id'

    objects = DeferJobCreatedManager()

@@ -600,6 +594,7 @@ UnpartitionedJobEvent._meta.db_table = '_unpartitioned_' + JobEvent._meta.db_tab
class ProjectUpdateEvent(BasePlaybookEvent):

    VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['project_update_id', 'workflow_job_id', 'job_created']
+   JOB_REFERENCE = 'project_update_id'

    objects = DeferJobCreatedManager()

@@ -641,6 +636,7 @@ class BaseCommandEvent(CreatedModifiedModel):
    """

    VALID_KEYS = ['event_data', 'created', 'counter', 'uuid', 'stdout', 'start_line', 'end_line', 'verbosity']
+   WRAPUP_EVENT = 'EOF'

    class Meta:
        abstract = True
@@ -736,6 +732,8 @@ class BaseCommandEvent(CreatedModifiedModel):
class AdHocCommandEvent(BaseCommandEvent):

    VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event', 'host_name', 'host_id', 'workflow_job_id', 'job_created']
+   WRAPUP_EVENT = 'playbook_on_stats'  # exception to BaseCommandEvent
+   JOB_REFERENCE = 'ad_hoc_command_id'

    objects = DeferJobCreatedManager()

@@ -836,6 +834,7 @@ UnpartitionedAdHocCommandEvent._meta.db_table = '_unpartitioned_' + AdHocCommand
class InventoryUpdateEvent(BaseCommandEvent):

    VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['inventory_update_id', 'workflow_job_id', 'job_created']
+   JOB_REFERENCE = 'inventory_update_id'

    objects = DeferJobCreatedManager()

@@ -881,6 +880,7 @@ UnpartitionedInventoryUpdateEvent._meta.db_table = '_unpartitioned_' + Inventory
class SystemJobEvent(BaseCommandEvent):

    VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['system_job_id', 'job_created']
+   JOB_REFERENCE = 'system_job_id'

    objects = DeferJobCreatedManager()
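These `WRAPUP_EVENT` / `JOB_REFERENCE` class attributes are what let the callback receiver stay generic instead of hard-coding an event map. A toy sketch of the dispatch idea; the stub classes below are illustrations, not AWX's real event models:

```python
class JobEventStub:
    JOB_REFERENCE = 'job_id'
    WRAPUP_EVENT = 'playbook_on_stats'


class SystemJobEventStub:
    JOB_REFERENCE = 'system_job_id'
    WRAPUP_EVENT = 'EOF'


def identify(body):
    # mirror of the receiver loop: the first class whose reference key
    # appears in the payload claims the event
    for cls in (JobEventStub, SystemJobEventStub):
        if cls.JOB_REFERENCE in body:
            return cls, body[cls.JOB_REFERENCE]
    return None, 'unknown job'


body = {'system_job_id': 42, 'event': 'EOF'}
cls, job_id = identify(body)
print(cls.__name__, job_id, body['event'] == cls.WRAPUP_EVENT)
```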
@@ -407,41 +407,54 @@ class TaskManagerUnifiedJobMixin(models.Model):
    def get_jobs_fail_chain(self):
        return []

    def dependent_jobs_finished(self):
        return True


class TaskManagerJobMixin(TaskManagerUnifiedJobMixin):
    class Meta:
        abstract = True

    def get_jobs_fail_chain(self):
        return [self.project_update] if self.project_update else []

    def dependent_jobs_finished(self):
        for j in self.dependent_jobs.all():
            if j.status in ['pending', 'waiting', 'running']:
                return False
        return True


class TaskManagerUpdateOnLaunchMixin(TaskManagerUnifiedJobMixin):
    class Meta:
        abstract = True

    def get_jobs_fail_chain(self):
        return list(self.dependent_jobs.all())


class TaskManagerProjectUpdateMixin(TaskManagerUpdateOnLaunchMixin):
    class Meta:
        abstract = True

    def get_jobs_fail_chain(self):
        # project update can be a dependency of an inventory update, in which
        # case we need to fail the job that may have spawned the inventory
        # update.
        # The inventory update will fail, but since it is not running it will
        # not cascade fail to the job from the errback logic in apply_async. As
        # such we should capture it here.
        blocked_jobs = list(self.unifiedjob_blocked_jobs.all().prefetch_related("unifiedjob_blocked_jobs"))
        other_tasks = []
        for b in blocked_jobs:
            other_tasks += list(b.unifiedjob_blocked_jobs.all())
        return blocked_jobs + other_tasks


class TaskManagerInventoryUpdateMixin(TaskManagerUpdateOnLaunchMixin):
    class Meta:
        abstract = True

    def get_jobs_fail_chain(self):
        blocked_jobs = list(self.unifiedjob_blocked_jobs.all())
        other_updates = []
        if blocked_jobs:
            # blocked_jobs[0] is just a reference to a job that depends on this
            # inventory update.
            # We can look at the dependencies of this blocked job to find other
            # inventory sources that are safe to fail.
            # Since the dependencies could also include project updates,
            # we need to check for type.
            for dep in blocked_jobs[0].dependent_jobs.all():
                if type(dep) is type(self) and dep.id != self.id:
                    other_updates.append(dep)
        return blocked_jobs + other_updates


class ExecutionEnvironmentMixin(models.Model):
    class Meta:
@@ -421,21 +421,8 @@ class JobNotificationMixin(object):
        The context will contain allowed content retrieved from a serialized job object
        (see JobNotificationMixin.JOB_FIELDS_ALLOWED_LIST the job's friendly name,
        and a url to the job run."""
-       job_context = {'host_status_counts': {}}
-       summary = None
-       try:
-           has_event_property = any([f for f in self.event_class._meta.fields if f.name == 'event'])
-       except NotImplementedError:
-           has_event_property = False
-       if has_event_property:
-           qs = self.get_event_queryset()
-           if qs:
-               event = qs.only('event_data').filter(event='playbook_on_stats').first()
-               if event:
-                   summary = event.get_host_status_counts()
-           job_context['host_status_counts'] = summary
        context = {
-           'job': job_context,
+           'job': {'host_status_counts': self.host_status_counts},
            'job_friendly_name': self.get_notification_friendly_name(),
            'url': self.get_ui_url(),
            'job_metadata': json.dumps(self.notification_data(), ensure_ascii=False, indent=4),
@@ -81,7 +81,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
    dtend = models.DateTimeField(
        null=True, default=None, editable=False, help_text=_("The last occurrence of the schedule occurs before this time, aftewards the schedule expires.")
    )
-   rrule = models.CharField(max_length=255, help_text=_("A value representing the schedules iCal recurrence rule."))
+   rrule = models.TextField(help_text=_("A value representing the schedules iCal recurrence rule."))
    next_run = models.DateTimeField(null=True, default=None, editable=False, help_text=_("The next time that the scheduled action will run."))

    @classmethod
@@ -91,22 +91,22 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
    @property
    def timezone(self):
        utc = tzutc()
-       # All rules in a ruleset will have the same dtstart so we can just take the first rule
-       tzinfo = Schedule.rrulestr(self.rrule)._rrule[0]._dtstart.tzinfo
-       if tzinfo is utc:
-           return 'UTC'
        all_zones = Schedule.get_zoneinfo()
        all_zones.sort(key=lambda x: -len(x))
+       for r in Schedule.rrulestr(self.rrule)._rrule:
+           if r._dtstart:
+               tzinfo = r._dtstart.tzinfo
+               if tzinfo is utc:
+                   return 'UTC'
+               fname = getattr(tzinfo, '_filename', None)
+               if fname:
+                   for zone in all_zones:
+                       if fname.endswith(zone):
+                           return zone
-       fname = getattr(tzinfo, '_filename', None)
-       if fname:
-           for zone in all_zones:
-               if fname.endswith(zone):
-                   return zone
        logger.warning('Could not detect valid zoneinfo for {}'.format(self.rrule))
        return ''

    @property
+   # TODO: How would we handle multiple until parameters? The UI is currently using this on the edit screen of a schedule
    def until(self):
        # The UNTIL= datestamp (if any) coerced from UTC to the local naive time
        # of the DTSTART
@@ -134,34 +148,48 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
        # timezone (America/New_York), and so we'll coerce to UTC _for you_
        # automatically.
        #
-       if 'until=' in rrule.lower():
-           # if DTSTART;TZID= is used, coerce "naive" UNTIL values
-           # to the proper UTC date
-           match_until = re.match(r".*?(?P<until>UNTIL\=[0-9]+T[0-9]+)(?P<utcflag>Z?)", rrule)
-           if not len(match_until.group('utcflag')):
-               # rrule = DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
-
-               # Find the UNTIL=N part of the string
-               # naive_until = UNTIL=20200601T170000
-               naive_until = match_until.group('until')
-
-               # What is the DTSTART timezone for:
-               # DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000Z
-               # local_tz = tzfile('/usr/share/zoneinfo/America/New_York')
-               local_tz = dateutil.rrule.rrulestr(rrule.replace(naive_until, naive_until + 'Z'), tzinfos=UTC_TIMEZONES)._dtstart.tzinfo
-
-               # Make a datetime object with tzinfo=<the DTSTART timezone>
-               # localized_until = datetime.datetime(2020, 6, 1, 17, 0, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York'))
-               localized_until = make_aware(datetime.datetime.strptime(re.sub('^UNTIL=', '', naive_until), "%Y%m%dT%H%M%S"), local_tz)
-
-               # Coerce the datetime to UTC and format it as a string w/ Zulu format
-               # utc_until = UNTIL=20200601T220000Z
-               utc_until = 'UNTIL=' + localized_until.astimezone(pytz.utc).strftime('%Y%m%dT%H%M%SZ')
-
-               # rrule was: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
-               # rrule is now: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T220000Z
-               rrule = rrule.replace(naive_until, utc_until)
-       return rrule
+       # Find the DTSTART rule or raise an error, its usually the first rule but that is not strictly enforced
+       start_date_rule = re.sub('^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
+       if not start_date_rule:
+           raise ValueError('A DTSTART field needs to be in the rrule')
+
+       rules = re.split(r'\s+', rrule)
+       for index in range(0, len(rules)):
+           rule = rules[index]
+           if 'until=' in rule.lower():
+               # if DTSTART;TZID= is used, coerce "naive" UNTIL values
+               # to the proper UTC date
+               match_until = re.match(r".*?(?P<until>UNTIL\=[0-9]+T[0-9]+)(?P<utcflag>Z?)", rule)
+               if not len(match_until.group('utcflag')):
+                   # rule = DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
+
+                   # Find the UNTIL=N part of the string
+                   # naive_until = UNTIL=20200601T170000
+                   naive_until = match_until.group('until')
+
+                   # What is the DTSTART timezone for:
+                   # DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000Z
+                   # local_tz = tzfile('/usr/share/zoneinfo/America/New_York')
+                   # We are going to construct a 'dummy' rule for parsing which will include the DTSTART and the rest of the rule
+                   temp_rule = "{} {}".format(start_date_rule, rule.replace(naive_until, naive_until + 'Z'))
+                   # If the rule is an EX rule we have to add an RRULE to it because an EX rule alone will not manifest into a ruleset
+                   if rule.lower().startswith('ex'):
+                       temp_rule = "{} {}".format(temp_rule, 'RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T170000Z')
+                   local_tz = dateutil.rrule.rrulestr(temp_rule, tzinfos=UTC_TIMEZONES, **{'forceset': True})._rrule[0]._dtstart.tzinfo
+
+                   # Make a datetime object with tzinfo=<the DTSTART timezone>
+                   # localized_until = datetime.datetime(2020, 6, 1, 17, 0, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York'))
+                   localized_until = make_aware(datetime.datetime.strptime(re.sub('^UNTIL=', '', naive_until), "%Y%m%dT%H%M%S"), local_tz)
+
+                   # Coerce the datetime to UTC and format it as a string w/ Zulu format
+                   # utc_until = UNTIL=20200601T220000Z
+                   utc_until = 'UNTIL=' + localized_until.astimezone(pytz.utc).strftime('%Y%m%dT%H%M%SZ')
+
+                   # rule was: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
+                   # rule is now: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T220000Z
+                   rules[index] = rule.replace(naive_until, utc_until)
+       return " ".join(rules)

    @classmethod
    def rrulestr(cls, rrule, fast_forward=True, **kwargs):
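The per-rule UNTIL coercion above boils down to: interpret the naive UNTIL stamp in the DTSTART timezone, then reprint it in UTC with a trailing Z. A standalone sketch of just that conversion, assuming the America/New_York example values from the comments:

```python
import datetime

import pytz

local_tz = pytz.timezone('America/New_York')
naive_until = 'UNTIL=20200601T170000'

# Parse the naive stamp and localize it to the DTSTART timezone.
stamp = datetime.datetime.strptime(naive_until.replace('UNTIL=', ''), '%Y%m%dT%H%M%S')
localized = local_tz.localize(stamp)

# Reprint in UTC Zulu form (17:00 EDT on June 1 is 21:00 UTC).
utc_until = 'UNTIL=' + localized.astimezone(pytz.utc).strftime('%Y%m%dT%H%M%SZ')
print(utc_until)
```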
@@ -176,20 +190,28 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
            if r._dtstart and r._dtstart.tzinfo is None:
                raise ValueError('A valid TZID must be provided (e.g., America/New_York)')

-       if fast_forward and ('MINUTELY' in rrule or 'HOURLY' in rrule) and 'COUNT=' not in rrule:
+       # Fast forward is a way for us to limit the number of events in the rruleset
+       # If we are fast forwarding and we don't have a count-limited rule that is minutely or hourly
+       # We will modify the start date of the rule to last week to prevent a large number of entries
+       if fast_forward:
            try:
-               # All rules in a ruleset will have the same dtstart value
-               # so let's compare the first event to now to see if it's > 7 days old
-               first_event = x[0]
-               # If the first event was over a week ago...
-               if (now() - first_event).days > 7:
-                   # hourly/minutely rrules with far-past DTSTART values
-                   # are *really* slow to precompute
-                   # start *from* one week ago to speed things up drastically
-                   dtstart = x._rrule[0]._dtstart.strftime(':%Y%m%dT')
-                   new_start = (now() - datetime.timedelta(days=7)).strftime(':%Y%m%dT')
-                   new_rrule = rrule.replace(dtstart, new_start)
-                   return Schedule.rrulestr(new_rrule, fast_forward=False)
+               for rule in x._rrule:
+                   # If any rule has a minutely or hourly rule without a count...
+                   if rule._freq in [dateutil.rrule.MINUTELY, dateutil.rrule.HOURLY] and not rule._count:
+                       # hourly/minutely rrules with far-past DTSTART values
+                       # are *really* slow to precompute
+                       # start *from* one week ago to speed things up drastically
+                       new_start = (now() - datetime.timedelta(days=7)).strftime('%Y%m%d')
+                       # Now we want to replace the DTSTART:<value>T with the new date (which includes the T)
+                       new_rrule = re.sub('(DTSTART[^:]*):[^T]+T', r'\1:{0}T'.format(new_start), rrule)
+                       return Schedule.rrulestr(new_rrule, fast_forward=False)
            except IndexError:
                pass

        return x

    def __str__(self):
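The fast-forward trick is a pure string rewrite of DTSTART. A small sketch showing the same regex in isolation; the rule string below is an arbitrary example:

```python
import datetime
import re

rrule = "DTSTART;TZID=America/New_York:20150101T000000 RRULE:FREQ=MINUTELY;INTERVAL=5"

# Replace only the date part of DTSTART (everything before the T) with a
# date one week ago, mirroring what Schedule.rrulestr does for uncounted
# minutely/hourly rules.
new_start = (datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=7)).strftime('%Y%m%d')
fast_forwarded = re.sub(r'(DTSTART[^:]*):[^T]+T', r'\1:{0}T'.format(new_start), rrule)
print(fast_forwarded)
```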
@@ -206,6 +228,22 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
        job_kwargs['_eager_fields'] = {'launch_type': 'scheduled', 'schedule': self}
        return job_kwargs

+   def get_end_date(ruleset):
+       # if we have a complex ruleset with a lot of options getting the last index of the ruleset can take some time
+       # And a ruleset without a count/until can come back as datetime.datetime(9999, 12, 31, 15, 0, tzinfo=tzfile('US/Eastern'))
+       # So we are going to do a quick scan to make sure we would have an end date
+       for a_rule in ruleset._rrule:
+           # if this rule does not have until or count in it then we have no end date
+           if not a_rule._until and not a_rule._count:
+               return None
+
+       # If we made it this far we should have an end date and can ask the ruleset what the last date is
+       # However, if the until/count is before dtstart we will get an IndexError when trying to get [-1]
+       try:
+           return ruleset[-1].astimezone(pytz.utc)
+       except IndexError:
+           return None
+
    def update_computed_fields_no_save(self):
        affects_fields = ['next_run', 'dtstart', 'dtend']
        starting_values = {}
@@ -229,12 +267,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
            self.dtstart = future_rs[0].astimezone(pytz.utc)
        except IndexError:
            self.dtstart = None
-       self.dtend = None
-       if 'until' in self.rrule.lower() or 'count' in self.rrule.lower():
-           try:
-               self.dtend = future_rs[-1].astimezone(pytz.utc)
-           except IndexError:
-               self.dtend = None
+       self.dtend = Schedule.get_end_date(future_rs)

        changed = any(getattr(self, field_name) != starting_values[field_name] for field_name in affects_fields)
        return changed
@@ -575,7 +575,8 @@ class UnifiedJob(
    dependent_jobs = models.ManyToManyField(
        'self',
        editable=False,
-       related_name='%(class)s_blocked_jobs+',
+       related_name='%(class)s_blocked_jobs',
+       symmetrical=False,
    )
    execution_node = models.TextField(
        blank=True,
@@ -717,6 +718,13 @@ class UnifiedJob(
        editable=False,
        help_text=_("The version of Ansible Core installed in the execution environment."),
    )
+   host_status_counts = models.JSONField(
+       blank=True,
+       null=True,
+       default=None,
+       editable=False,
+       help_text=_("Playbook stats from the Ansible playbook_on_stats event."),
+   )
    work_unit_id = models.CharField(
        max_length=255, blank=True, default=None, editable=False, null=True, help_text=_("The Receptor work unit ID associated with this job.")
    )
@@ -26,7 +26,7 @@ class DependencyGraph(object):
        # The reason for tracking both inventory and inventory sources:
        # Consider InvA, which has two sources, InvSource1, InvSource2.
        # JobB might depend on InvA, which launches two updates, one for each source.
        # To determine if JobB can run, we can just check InvA, which is marked in
        # INVENTORY_UPDATES, instead of having to check for both entries in
        # INVENTORY_SOURCE_UPDATES.
        self.data[self.INVENTORY_UPDATES] = {}
@@ -34,6 +34,7 @@ from awx.main.utils.pglock import advisory_lock
from awx.main.utils import get_type_for_model, task_manager_bulk_reschedule, schedule_task_manager
from awx.main.utils.common import create_partition
from awx.main.signals import disable_activity_stream
+from awx.main.constants import ACTIVE_STATES
from awx.main.scheduler.dependency_graph import DependencyGraph
from awx.main.scheduler.task_manager_models import TaskManagerInstances
from awx.main.scheduler.task_manager_models import TaskManagerInstanceGroups
@@ -79,10 +80,23 @@ class TaskManager:
        if blocked_by:
            return blocked_by

-       if not task.dependent_jobs_finished():
-           blocked_by = task.dependent_jobs.first()
-           if blocked_by:
-               return blocked_by
+       for dep in task.dependent_jobs.all():
+           if dep.status in ACTIVE_STATES:
+               return dep
+           # if we detect a failed or error dependency, go ahead and fail this
+           # task. The errback on the dependency takes some time to trigger,
+           # and we don't want the task to enter running state if its
+           # dependency has failed or errored.
+           elif dep.status in ("error", "failed"):
+               task.status = 'failed'
+               task.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
+                   get_type_for_model(type(dep)),
+                   dep.name,
+                   dep.id,
+               )
+               task.save(update_fields=['status', 'job_explanation'])
+               task.websocket_emit_status('failed')
+               return dep

        return None
@@ -281,8 +295,10 @@ class TaskManager:
        for task in running_tasks:
            self.dependency_graph.add_job(task)

-   def create_project_update(self, task):
-       project_task = Project.objects.get(id=task.project_id).create_project_update(_eager_fields=dict(launch_type='dependency'))
+   def create_project_update(self, task, project_id=None):
+       if project_id is None:
+           project_id = task.project_id
+       project_task = Project.objects.get(id=project_id).create_project_update(_eager_fields=dict(launch_type='dependency'))

        # Project created 1 second behind
        project_task.created = task.created - timedelta(seconds=1)
@@ -302,14 +318,10 @@ class TaskManager:
        # self.process_inventory_sources(inventory_sources)
        return inventory_task

-   def capture_chain_failure_dependencies(self, task, dependencies):
+   def add_dependencies(self, task, dependencies):
        with disable_activity_stream():
            task.dependent_jobs.add(*dependencies)

-           for dep in dependencies:
-               # Add task + all deps except self
-               dep.dependent_jobs.add(*([task] + [d for d in dependencies if d != dep]))
-
    def get_latest_inventory_update(self, inventory_source):
        latest_inventory_update = InventoryUpdate.objects.filter(inventory_source=inventory_source).order_by("-created")
        if not latest_inventory_update.exists():
@@ -335,8 +347,8 @@ class TaskManager:
            return True
        return False

-   def get_latest_project_update(self, job):
-       latest_project_update = ProjectUpdate.objects.filter(project=job.project, job_type='check').order_by("-created")
+   def get_latest_project_update(self, project_id):
+       latest_project_update = ProjectUpdate.objects.filter(project=project_id, job_type='check').order_by("-created")
        if not latest_project_update.exists():
            return None
        return latest_project_update.first()
@@ -376,45 +388,69 @@ class TaskManager:
            return True
        return False

    def gen_dep_for_job(self, task):
        created_dependencies = []
        dependencies = []
        # TODO: Can remove task.project None check after scan-job-default-playbook is removed
        if task.project is not None and task.project.scm_update_on_launch is True:
            latest_project_update = self.get_latest_project_update(task.project_id)
            if self.should_update_related_project(task, latest_project_update):
                latest_project_update = self.create_project_update(task)
                created_dependencies.append(latest_project_update)
            dependencies.append(latest_project_update)

        # Inventory created 2 seconds behind job
        try:
            start_args = json.loads(decrypt_field(task, field_name="start_args"))
        except ValueError:
            start_args = dict()
        # generator for inventory sources related to this task
        task_inv_sources = (invsrc for invsrc in self.all_inventory_sources if invsrc.inventory_id == task.inventory_id)
        for inventory_source in task_inv_sources:
            if "inventory_sources_already_updated" in start_args and inventory_source.id in start_args['inventory_sources_already_updated']:
                continue
            if not inventory_source.update_on_launch:
                continue
            latest_inventory_update = self.get_latest_inventory_update(inventory_source)
            if self.should_update_inventory_source(task, latest_inventory_update):
                inventory_task = self.create_inventory_update(task, inventory_source)
                created_dependencies.append(inventory_task)
                dependencies.append(inventory_task)
            else:
                dependencies.append(latest_inventory_update)

        if dependencies:
            self.add_dependencies(task, dependencies)

        return created_dependencies

    def gen_dep_for_inventory_update(self, inventory_task):
        created_dependencies = []
        if inventory_task.source == "scm":
            invsrc = inventory_task.inventory_source
            if not invsrc.source_project.scm_update_on_launch:
                return created_dependencies

            latest_src_project_update = self.get_latest_project_update(invsrc.source_project_id)
            if self.should_update_related_project(inventory_task, latest_src_project_update):
                latest_src_project_update = self.create_project_update(inventory_task, project_id=invsrc.source_project_id)
                created_dependencies.append(latest_src_project_update)
            self.add_dependencies(inventory_task, [latest_src_project_update])
            latest_src_project_update.scm_inventory_updates.add(inventory_task)
        return created_dependencies

    def generate_dependencies(self, undeped_tasks):
        created_dependencies = []
        for task in undeped_tasks:
            task.log_lifecycle("acknowledged")
            dependencies = []
            if not type(task) is Job:
            if type(task) is Job:
                created_dependencies += self.gen_dep_for_job(task)
            elif type(task) is InventoryUpdate:
                created_dependencies += self.gen_dep_for_inventory_update(task)
            else:
                continue
            # TODO: Can remove task.project None check after scan-job-default-playbook is removed
            if task.project is not None and task.project.scm_update_on_launch is True:
                latest_project_update = self.get_latest_project_update(task)
                if self.should_update_related_project(task, latest_project_update):
                    project_task = self.create_project_update(task)
                    created_dependencies.append(project_task)
                    dependencies.append(project_task)
                else:
                    dependencies.append(latest_project_update)

            # Inventory created 2 seconds behind job
            try:
                start_args = json.loads(decrypt_field(task, field_name="start_args"))
            except ValueError:
                start_args = dict()
            for inventory_source in [invsrc for invsrc in self.all_inventory_sources if invsrc.inventory == task.inventory]:
                if "inventory_sources_already_updated" in start_args and inventory_source.id in start_args['inventory_sources_already_updated']:
                    continue
                if not inventory_source.update_on_launch:
                    continue
                latest_inventory_update = self.get_latest_inventory_update(inventory_source)
                if self.should_update_inventory_source(task, latest_inventory_update):
                    inventory_task = self.create_inventory_update(task, inventory_source)
                    created_dependencies.append(inventory_task)
                    dependencies.append(inventory_task)
                else:
                    dependencies.append(latest_inventory_update)

            if len(dependencies) > 0:
                self.capture_chain_failure_dependencies(task, dependencies)

        UnifiedJob.objects.filter(pk__in=[task.pk for task in undeped_tasks]).update(dependencies_processed=True)

        return created_dependencies

    def process_pending_tasks(self, pending_tasks):
@@ -572,6 +608,8 @@ class TaskManager:
        pending_tasks = [t for t in all_sorted_tasks if t.status == 'pending']
        undeped_tasks = [t for t in pending_tasks if not t.dependencies_processed]
        dependencies = self.generate_dependencies(undeped_tasks)
        deps_of_deps = self.generate_dependencies(dependencies)
        dependencies += deps_of_deps
        self.process_pending_tasks(dependencies)
        self.process_pending_tasks(pending_tasks)
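The second generate_dependencies pass exists because a dependency created in the first pass can itself need a dependency: an SCM inventory update spawned for a job may require its source project to update first. A minimal sketch of that fixed-depth expansion (stub generator, hypothetical names):

    def expand_dependencies(generate, pending):
        # First pass: direct dependencies of the pending work.
        created = generate(pending)
        # Second pass: dependencies of those dependencies, e.g. a project update
        # needed by an SCM inventory update that the first pass created.
        created += generate(created)
        return created

    # A job yields an inventory update, which in turn yields a project update.
    graph = {'job': ['inv_update'], 'inv_update': ['proj_update']}
    generate = lambda tasks: [dep for t in tasks for dep in graph.get(t, [])]
    assert expand_dependencies(generate, ['job']) == ['inv_update', 'proj_update']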
@@ -9,10 +9,11 @@ import stat
from django.utils.timezone import now
from django.conf import settings
from django_guid import get_guid
from django.utils.functional import cached_property

# AWX
from awx.main.redact import UriCleaner
from awx.main.constants import MINIMAL_EVENTS
from awx.main.constants import MINIMAL_EVENTS, ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE
from awx.main.utils.update_model import update_model
from awx.main.queue import CallbackQueueDispatcher
@@ -20,8 +21,6 @@ logger = logging.getLogger('awx.main.tasks.callback')


class RunnerCallback:
    event_data_key = 'job_id'

    def __init__(self, model=None):
        self.parent_workflow_job_id = None
        self.host_map = {}
@@ -33,10 +32,40 @@ class RunnerCallback:
        self.event_ct = 0
        self.model = model
        self.update_attempts = int(settings.DISPATCHER_DB_DOWNTOWN_TOLLERANCE / 5)
        self.wrapup_event_dispatched = False
        self.extra_update_fields = {}

    def update_model(self, pk, _attempt=0, **updates):
        return update_model(self.model, pk, _attempt=0, _max_attempts=self.update_attempts, **updates)

    @cached_property
    def wrapup_event_type(self):
        return self.instance.event_class.WRAPUP_EVENT

    @cached_property
    def event_data_key(self):
        return self.instance.event_class.JOB_REFERENCE

    def delay_update(self, skip_if_already_set=False, **kwargs):
        """Stash fields that should be saved along with the job status change"""
        for key, value in kwargs.items():
            if key in self.extra_update_fields and skip_if_already_set:
                continue
            elif key in self.extra_update_fields and key in ('job_explanation', 'result_traceback'):
                if str(value) in self.extra_update_fields.get(key, ''):
                    continue  # if already set, avoid duplicating messages
                # In the case of these fields, we do not want to lose any prior information, so combine values
                self.extra_update_fields[key] = '\n'.join([str(self.extra_update_fields[key]), str(value)])
            else:
                self.extra_update_fields[key] = value

    def get_delayed_update_fields(self):
        """Return finalized dict of all fields that should be saved along with the job status change"""
        self.extra_update_fields['emitted_events'] = self.event_ct
        if 'got an unexpected keyword argument' in self.extra_update_fields.get('result_traceback', ''):
            self.delay_update(result_traceback=ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE)
        return self.extra_update_fields

    def event_handler(self, event_data):
        #
        # ⚠️ D-D-D-DANGER ZONE ⚠️
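delay_update batches field changes so the job row is written once, at status-change time; for job_explanation and result_traceback it appends rather than overwrites, and exact duplicates are dropped. A brief sketch of the semantics (the unit tests further down in this log exercise exactly this; constructing RunnerCallback outside AWX's test fixtures needs Django settings configured):

    from awx.main.tasks.callback import RunnerCallback

    rc = RunnerCallback()
    rc.delay_update(job_explanation='first error')
    rc.delay_update(job_explanation='second error')   # combined, newline-joined
    rc.delay_update(job_explanation='first error')    # exact duplicate: dropped
    rc.delay_update(job_explanation='ignored', skip_if_already_set=True)
    assert rc.extra_update_fields['job_explanation'] == 'first error\nsecond error'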
@@ -130,6 +159,9 @@ class RunnerCallback:
        elif self.recent_event_timings.maxlen:
            self.recent_event_timings.append(time.time())

        if event_data.get('event', '') == self.wrapup_event_type:
            self.wrapup_event_dispatched = True

        event_data.setdefault(self.event_data_key, self.instance.id)
        self.dispatcher.dispatch(event_data)
        self.event_ct += 1
@@ -138,8 +170,7 @@ class RunnerCallback:
        Handle artifacts
        '''
        if event_data.get('event_data', {}).get('artifact_data', {}):
            self.instance.artifacts = event_data['event_data']['artifact_data']
            self.instance.save(update_fields=['artifacts'])
            self.delay_update(artifacts=event_data['event_data']['artifact_data'])

        return False
@@ -170,6 +201,8 @@ class RunnerCallback:
        }
        event_data.setdefault(self.event_data_key, self.instance.id)
        self.dispatcher.dispatch(event_data)
        if self.wrapup_event_type == 'EOF':
            self.wrapup_event_dispatched = True

    def status_handler(self, status_data, runner_config):
        """
@@ -205,16 +238,10 @@ class RunnerCallback:
        elif status_data['status'] == 'error':
            result_traceback = status_data.get('result_traceback', None)
            if result_traceback:
                from awx.main.signals import disable_activity_stream  # Circular import

                with disable_activity_stream():
                    self.instance = self.update_model(self.instance.pk, result_traceback=result_traceback)
                self.delay_update(result_traceback=result_traceback)


class RunnerCallbackForProjectUpdate(RunnerCallback):

    event_data_key = 'project_update_id'

    def __init__(self, *args, **kwargs):
        super(RunnerCallbackForProjectUpdate, self).__init__(*args, **kwargs)
        self.playbook_new_revision = None
@@ -231,9 +258,6 @@ class RunnerCallbackForProjectUpdate(RunnerCallback):


class RunnerCallbackForInventoryUpdate(RunnerCallback):

    event_data_key = 'inventory_update_id'

    def __init__(self, *args, **kwargs):
        super(RunnerCallbackForInventoryUpdate, self).__init__(*args, **kwargs)
        self.end_line = 0
@@ -245,9 +269,6 @@ class RunnerCallbackForInventoryUpdate(RunnerCallback):


class RunnerCallbackForAdHocCommand(RunnerCallback):

    event_data_key = 'ad_hoc_command_id'

    def __init__(self, *args, **kwargs):
        super(RunnerCallbackForAdHocCommand, self).__init__(*args, **kwargs)
        self.host_map = {}
@@ -255,4 +276,4 @@ class RunnerCallbackForAdHocCommand(RunnerCallback):


class RunnerCallbackForSystemJob(RunnerCallback):

    event_data_key = 'system_job_id'
    pass
@@ -40,7 +40,6 @@ from awx.main.constants import (
    JOB_FOLDER_PREFIX,
    MAX_ISOLATED_PATH_COLON_DELIMITER,
    CONTAINER_VOLUMES_MOUNT_TYPES,
    ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE,
)
from awx.main.models import (
    Instance,
@@ -78,7 +77,7 @@ from awx.main.utils.common import (
)
from awx.conf.license import get_license
from awx.main.utils.handlers import SpecialInventoryHandler
from awx.main.tasks.system import handle_success_and_failure_notifications, update_smart_memberships_for_inventory, update_inventory_computed_fields
from awx.main.tasks.system import update_smart_memberships_for_inventory, update_inventory_computed_fields
from awx.main.utils.update_model import update_model
from rest_framework.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _
@@ -119,12 +118,11 @@ class BaseTask(object):
    def update_model(self, pk, _attempt=0, **updates):
        return update_model(self.model, pk, _attempt=0, _max_attempts=self.update_attempts, **updates)

    def write_private_data_file(self, private_data_dir, file_name, data, sub_dir=None, permissions=0o600):
    def write_private_data_file(self, private_data_dir, file_name, data, sub_dir=None, file_permissions=0o600):
        base_path = private_data_dir
        if sub_dir:
            base_path = os.path.join(private_data_dir, sub_dir)
            if not os.path.exists(base_path):
                os.mkdir(base_path, 0o700)
            os.makedirs(base_path, mode=0o700, exist_ok=True)

        # If we got a file name create it, otherwise we want a temp file
        if file_name:
@@ -134,7 +132,7 @@ class BaseTask(object):
            os.close(handle)

        file = Path(file_path)
        file.touch(mode=permissions, exist_ok=True)
        file.touch(mode=file_permissions, exist_ok=True)
        with open(file_path, 'w') as f:
            f.write(data)
        return file_path
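The helper's pattern — create the parent directory idempotently, then fix the file mode before writing secrets — stands on its own. A self-contained sketch under those assumptions (illustrative, not the AWX method itself):

    import os
    from pathlib import Path

    def write_private_file(base_dir, name, data, sub_dir=None, file_permissions=0o600):
        path = Path(base_dir, sub_dir) if sub_dir else Path(base_dir)
        # makedirs with exist_ok replaces the old exists()/mkdir() pair, which
        # could race when two writers targeted the same sub_dir.
        os.makedirs(path, mode=0o700, exist_ok=True)
        target = path / name
        target.touch(mode=file_permissions, exist_ok=True)  # mode set on creation, before content lands
        target.write_text(data)
        return str(target)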
@@ -257,9 +255,9 @@ class BaseTask(object):
            # Instead, ssh private key file is explicitly passed via an
            # env variable.
            else:
                private_data_files['credentials'][credential] = self.write_private_data_file(private_data_dir, None, data, 'env')
                private_data_files['credentials'][credential] = self.write_private_data_file(private_data_dir, None, data, sub_dir='env')
        for credential, data in private_data.get('certificates', {}).items():
            self.write_private_data_file(private_data_dir, 'ssh_key_data-cert.pub', data, 'artifacts')
            self.write_private_data_file(private_data_dir, 'ssh_key_data-cert.pub', data, sub_dir=os.path.join('artifacts', str(self.instance.id)))
        return private_data_files, ssh_key_data

    def build_passwords(self, instance, runtime_passwords):
@@ -282,7 +280,7 @@ class BaseTask(object):
            content = yaml.safe_dump(vars)
        else:
            content = safe_dump(vars, safe_dict)
        return self.write_private_data_file(private_data_dir, 'extravars', content, 'env')
        return self.write_private_data_file(private_data_dir, 'extravars', content, sub_dir='env')

    def add_awx_venv(self, env):
        env['VIRTUAL_ENV'] = settings.AWX_VENV_PATH
@@ -321,13 +319,13 @@ class BaseTask(object):
        # so we can associate emitted events to Host objects
        self.runner_callback.host_map = {hostname: hv.pop('remote_tower_id', '') for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()}
        file_content = '#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json.dumps(script_data)
        return self.write_private_data_file(private_data_dir, 'hosts', file_content, 'inventory', 0o700)
        return self.write_private_data_file(private_data_dir, 'hosts', file_content, sub_dir='inventory', file_permissions=0o700)

    def build_args(self, instance, private_data_dir, passwords):
        raise NotImplementedError

    def write_args_file(self, private_data_dir, args):
        return self.write_private_data_file(private_data_dir, 'cmdline', ansible_runner.utils.args2cmdline(*args), 'env')
        return self.write_private_data_file(private_data_dir, 'cmdline', ansible_runner.utils.args2cmdline(*args), sub_dir='env')

    def build_credentials_list(self, instance):
        return []
@@ -412,7 +410,6 @@ class BaseTask(object):
        self.instance = self.update_model(pk, status='running', start_args='')  # blank field to remove encrypted passwords
        self.instance.websocket_emit_status("running")
        status, rc = 'error', None
        extra_update_fields = {}
        fact_modification_times = {}
        self.runner_callback.event_ct = 0

@@ -523,7 +520,7 @@ class BaseTask(object):
            runner_settings['idle_timeout'] = idle_timeout

        # Write out our own settings file
        self.write_private_data_file(private_data_dir, 'settings', json.dumps(runner_settings), 'env')
        self.write_private_data_file(private_data_dir, 'settings', json.dumps(runner_settings), sub_dir='env')

        self.instance.log_lifecycle("running_playbook")
        if isinstance(self.instance, SystemJob):
@@ -547,20 +544,14 @@ class BaseTask(object):
            rc = res.rc

            if status in ('timeout', 'error'):
                job_explanation = f"Job terminated due to {status}"
                self.instance.job_explanation = self.instance.job_explanation or job_explanation
                self.runner_callback.delay_update(skip_if_already_set=True, job_explanation=f"Job terminated due to {status}")
                if status == 'timeout':
                    status = 'failed'

                extra_update_fields['job_explanation'] = self.instance.job_explanation
                # ensure failure notification sends even if playbook_on_stats event is not triggered
                handle_success_and_failure_notifications.apply_async([self.instance.id])

        except ReceptorNodeNotFound as exc:
            extra_update_fields['job_explanation'] = str(exc)
            self.runner_callback.delay_update(job_explanation=str(exc))
        except Exception:
            # this could catch programming or file system errors
            extra_update_fields['result_traceback'] = traceback.format_exc()
            self.runner_callback.delay_update(result_traceback=traceback.format_exc())
            logger.exception('%s Exception occurred while running task', self.instance.log_format)
        finally:
            logger.debug('%s finished running, producing %s events.', self.instance.log_format, self.runner_callback.event_ct)
@@ -570,18 +561,19 @@ class BaseTask(object):
        except PostRunError as exc:
            if status == 'successful':
                status = exc.status
                extra_update_fields['job_explanation'] = exc.args[0]
                self.runner_callback.delay_update(job_explanation=exc.args[0])
                if exc.tb:
                    extra_update_fields['result_traceback'] = exc.tb
                    self.runner_callback.delay_update(result_traceback=exc.tb)
        except Exception:
            logger.exception('{} Post run hook errored.'.format(self.instance.log_format))

        # We really shouldn't get into this one but just in case....
        if 'got an unexpected keyword argument' in extra_update_fields.get('result_traceback', ''):
            extra_update_fields['result_traceback'] = "{}\n\n{}".format(extra_update_fields['result_traceback'], ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE)

        self.instance = self.update_model(pk)
        self.instance = self.update_model(pk, status=status, emitted_events=self.runner_callback.event_ct, **extra_update_fields)
        self.instance = self.update_model(pk, status=status, select_for_update=True, **self.runner_callback.get_delayed_update_fields())

        # Field host_status_counts is used as a metric to check if event processing is finished
        # we send notifications if it is, if not, callback receiver will send them
        if (self.instance.host_status_counts is not None) or (not self.runner_callback.wrapup_event_dispatched):
            self.instance.send_notification_templates('succeeded' if status == 'successful' else 'failed')

        try:
            self.final_run_hook(self.instance, status, private_data_dir, fact_modification_times)
@@ -1459,8 +1451,8 @@ class RunProjectUpdate(BaseTask):
        params.setdefault('container_volume_mounts', [])
        params['container_volume_mounts'].extend(
            [
                f"{project_path}:{project_path}:Z",
                f"{cache_path}:{cache_path}:Z",
                f"{project_path}:{project_path}:z",
                f"{cache_path}:{cache_path}:z",
            ]
        )
        return params
@@ -1609,7 +1601,7 @@ class RunInventoryUpdate(BaseTask):
        if injector is not None:
            content = injector.inventory_contents(inventory_update, private_data_dir)
            # must be a statically named file
            self.write_private_data_file(private_data_dir, injector.filename, content, 'inventory', 0o700)
            self.write_private_data_file(private_data_dir, injector.filename, content, sub_dir='inventory', file_permissions=0o700)
            rel_path = os.path.join('inventory', injector.filename)
        elif src == 'scm':
            rel_path = os.path.join('project', inventory_update.source_path)
@@ -24,10 +24,7 @@ from awx.main.utils.common import (
    parse_yaml_or_json,
    cleanup_new_process,
)
from awx.main.constants import (
    MAX_ISOLATED_PATH_COLON_DELIMITER,
    ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE,
)
from awx.main.constants import MAX_ISOLATED_PATH_COLON_DELIMITER

# Receptorctl
from receptorctl.socket_interface import ReceptorControl
@@ -350,6 +347,11 @@ class AWXReceptorJob:
            resultsock.shutdown(socket.SHUT_RDWR)
            resultfile.close()
        elif res.status == 'error':
            # If ansible-runner ran, but an error occurred at runtime, the traceback information
            # is saved via the status_handler passed in to the processor.
            if 'result_traceback' in self.task.runner_callback.extra_update_fields:
                return res

            try:
                unit_status = receptor_ctl.simple_command(f'work status {self.unit_id}')
                detail = unit_status.get('Detail', None)
@@ -365,28 +367,19 @@ class AWXReceptorJob:
                logger.warning(f"Could not launch pod for {log_name}. Exceeded quota.")
                self.task.update_model(self.task.instance.pk, status='pending')
                return
            # If ansible-runner ran, but an error occurred at runtime, the traceback information
            # is saved via the status_handler passed in to the processor.
            if state_name == 'Succeeded':
                return res

            if not self.task.instance.result_traceback:
                try:
                    resultsock = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True)
                    lines = resultsock.readlines()
                    receptor_output = b"".join(lines).decode()
                    if receptor_output:
                        self.task.instance.result_traceback = receptor_output
                        if 'got an unexpected keyword argument' in receptor_output:
                            self.task.instance.result_traceback = "{}\n\n{}".format(receptor_output, ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE)
                        self.task.instance.save(update_fields=['result_traceback'])
                    elif detail:
                        self.task.instance.result_traceback = detail
                        self.task.instance.save(update_fields=['result_traceback'])
                    else:
                        logger.warning(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}')
                except Exception:
                    raise RuntimeError(detail)
            try:
                resultsock = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True)
                lines = resultsock.readlines()
                receptor_output = b"".join(lines).decode()
                if receptor_output:
                    self.task.runner_callback.delay_update(result_traceback=receptor_output)
                elif detail:
                    self.task.runner_callback.delay_update(result_traceback=detail)
                else:
                    logger.warning(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}')
            except Exception:
                raise RuntimeError(detail)

        return res
@@ -695,7 +695,7 @@ def handle_work_error(task_id, *args, **kwargs):
            first_instance = instance
            first_instance_type = each_task['type']

        if instance.celery_task_id != task_id and not instance.cancel_flag and not instance.status == 'successful':
        if instance.celery_task_id != task_id and not instance.cancel_flag and not instance.status in ('successful', 'failed'):
            instance.status = 'failed'
            instance.failed = True
            if not instance.job_explanation:
@@ -716,25 +716,6 @@ def handle_work_error(task_id, *args, **kwargs):
        pass


@task(queue=get_local_queuename)
def handle_success_and_failure_notifications(job_id):
    uj = UnifiedJob.objects.get(pk=job_id)
    retries = 0
    while retries < settings.AWX_NOTIFICATION_JOB_FINISH_MAX_RETRY:
        if uj.finished:
            uj.send_notification_templates('succeeded' if uj.status == 'successful' else 'failed')
            return
        else:
            # wait a few seconds to avoid a race where the
            # events are persisted _before_ the UJ.status
            # changes from running -> successful
            retries += 1
            time.sleep(1)
            uj = UnifiedJob.objects.get(pk=job_id)

    logger.warning(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.")


@task(queue=get_local_queuename)
def update_inventory_computed_fields(inventory_id):
    """
@@ -532,6 +532,49 @@ def test_vault_password_required(post, organization, admin):
    assert 'required fields (vault_password)' in j.job_explanation


@pytest.mark.django_db
def test_vault_id_immutable(post, patch, organization, admin):
    vault = CredentialType.defaults['vault']()
    vault.save()
    response = post(
        reverse('api:credential_list'),
        {
            'credential_type': vault.pk,
            'organization': organization.id,
            'name': 'Best credential ever',
            'inputs': {'vault_id': 'password', 'vault_password': 'password'},
        },
        admin,
    )
    assert response.status_code == 201
    assert Credential.objects.count() == 1
    response = patch(
        reverse('api:credential_detail', kwargs={'pk': response.data['id']}), {'inputs': {'vault_id': 'password2', 'vault_password': 'password'}}, admin
    )
    assert response.status_code == 400
    assert response.data['inputs'][0] == 'Vault IDs cannot be changed once they have been created.'


@pytest.mark.django_db
def test_patch_without_vault_id_valid(post, patch, organization, admin):
    vault = CredentialType.defaults['vault']()
    vault.save()
    response = post(
        reverse('api:credential_list'),
        {
            'credential_type': vault.pk,
            'organization': organization.id,
            'name': 'Best credential ever',
            'inputs': {'vault_id': 'password', 'vault_password': 'password'},
        },
        admin,
    )
    assert response.status_code == 201
    assert Credential.objects.count() == 1
    response = patch(reverse('api:credential_detail', kwargs={'pk': response.data['id']}), {'name': 'worst_credential_ever'}, admin)
    assert response.status_code == 200


#
# Net Credentials
#
@@ -111,21 +111,41 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
    [
        ("", "This field may not be blank"),
        ("DTSTART:NONSENSE", "Valid DTSTART required in rrule"),
        ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
        ("DTSTART:20300308T050000Z DTSTART:20310308T050000", "Multiple DTSTART is not supported"),
        ("DTSTART:20300308T050000Z", "RRULE required in rrule"),
        ("DTSTART:20300308T050000Z RRULE:NONSENSE", "INTERVAL required in rrule"),
        ("DTSTART:20300308T050000Z", "One or more rule required in rrule"),
        ("DTSTART:20300308T050000Z RRULE:FREQ=MONTHLY;INTERVAL=1; EXDATE:20220401", "EXDATE not allowed in rrule"),
        ("DTSTART:20300308T050000Z RRULE:FREQ=MONTHLY;INTERVAL=1; RDATE:20220401", "RDATE not allowed in rrule"),
        ("DTSTART:20300308T050000Z RRULE:FREQ=SECONDLY;INTERVAL=5;COUNT=6", "SECONDLY is not supported"),
        ("DTSTART:20300308T050000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=3,4", "Multiple BYMONTHDAYs not supported"),  # noqa
        ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=1,2", "Multiple BYMONTHs not supported"),  # noqa
        # Individual rule test
        ("DTSTART:20300308T050000Z RRULE:NONSENSE", "INTERVAL required in rrule"),
        ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO", "BYDAY with numeric prefix not supported"),  # noqa
        ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYYEARDAY=100", "BYYEARDAY not supported"),  # noqa
        ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYWEEKNO=20", "BYWEEKNO not supported"),
        ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"),  # noqa
        ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"),  # noqa
        # Individual rule test with multiple rules
        ## Bad Rule: RRULE:NONSENSE
        ("DTSTART:20300308T050000Z RRULE:NONSENSE RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU", "INTERVAL required in rrule"),
        ## Bad Rule: RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO
        (
            "DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO",
            "BYDAY with numeric prefix not supported",
        ),  # noqa
        ## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z
        (
            "DTSTART:20030925T104941Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z",
            "RRULE may not contain both COUNT and UNTIL",
        ),  # noqa
        ## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000
        (
            "DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000",
            "COUNT > 999 is unsupported",
        ),  # noqa
        # Multiple errors, first condition should be returned
        ("DTSTART:NONSENSE RRULE:NONSENSE RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=3,4", "Valid DTSTART required in rrule"),
        # Parsing Tests
        ("DTSTART;TZID=US-Eastern:19961105T090000 RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5", "A valid TZID must be provided"),  # noqa
        ("DTSTART:20300308T050000Z RRULE:FREQ=REGULARLY;INTERVAL=1", "rrule parsing failed validation: invalid 'FREQ': REGULARLY"),  # noqa
        ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"),  # noqa
        ("DTSTART;TZID=America/New_York:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"),
        ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
    ],
)
def test_invalid_rrules(post, admin_user, project, inventory, rrule, error):
@@ -143,6 +163,29 @@ def test_invalid_rrules(post, admin_user, project, inventory, rrule, error):
    assert error in smart_str(resp.content)


def test_multiple_invalid_rrules(post, admin_user, project, inventory):
    job_template = JobTemplate.objects.create(name='test-jt', project=project, playbook='helloworld.yml', inventory=inventory)
    url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
    resp = post(
        url,
        {
            'name': 'Some Schedule',
            'rrule': "EXRULE:FREQ=SECONDLY DTSTART;TZID=US-Eastern:19961105T090000 RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5;UNTIL=20220101 DTSTART;TZID=US-Eastern:19961105T090000",
        },
        admin_user,
        expect=400,
    )
    expected_result = {
        "rrule": [
            "Multiple DTSTART is not supported.",
            "INTERVAL required in rrule: RULE:FREQ=SECONDLY",
            "RRULE may not contain both COUNT and UNTIL: RULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5;UNTIL=20220101",
            "rrule parsing failed validation: 'NoneType' object has no attribute 'group'",
        ]
    }
    assert expected_result == resp.data


@pytest.mark.django_db
def test_normal_users_can_preview_schedules(post, alice):
    url = reverse('api:schedule_rrule')
@@ -381,6 +424,78 @@ def test_dst_rollback_duplicates(post, admin_user):
    ]


@pytest.mark.parametrize(
    'rrule, expected_result',
    (
        pytest.param(
            'DTSTART;TZID=America/New_York:20300302T150000 RRULE:INTERVAL=1;FREQ=DAILY;UNTIL=20300304T1500 EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU',
            ['2030-03-02 15:00:00-05:00', '2030-03-04 15:00:00-05:00'],
            id="Every day except sundays",
        ),
        pytest.param(
            'DTSTART;TZID=US/Eastern:20300428T170000 RRULE:INTERVAL=1;FREQ=DAILY;COUNT=4 EXRULE:INTERVAL=1;FREQ=DAILY;BYMONTH=4;BYMONTHDAY=30',
            ['2030-04-28 17:00:00-04:00', '2030-04-29 17:00:00-04:00', '2030-05-01 17:00:00-04:00'],
            id="Every day except April 30th",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20300313T164500 RRULE:INTERVAL=5;FREQ=MINUTELY EXRULE:FREQ=MINUTELY;INTERVAL=5;BYDAY=WE;BYHOUR=17,18',
            [
                '2030-03-13 16:45:00-04:00',
                '2030-03-13 16:50:00-04:00',
                '2030-03-13 16:55:00-04:00',
                '2030-03-13 19:00:00-04:00',
                '2030-03-13 19:05:00-04:00',
                '2030-03-13 19:10:00-04:00',
                '2030-03-13 19:15:00-04:00',
                '2030-03-13 19:20:00-04:00',
                '2030-03-13 19:25:00-04:00',
                '2030-03-13 19:30:00-04:00',
            ],
            id="Every 5 minutes but not Wednesdays from 5-7pm",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20300426T100100 RRULE:INTERVAL=15;FREQ=MINUTELY;BYDAY=MO,TU,WE,TH,FR;BYHOUR=10,11 EXRULE:INTERVAL=15;FREQ=MINUTELY;BYDAY=MO,TU,WE,TH,FR;BYHOUR=11;BYMINUTE=3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,34,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59',
            [
                '2030-04-26 10:01:00-04:00',
                '2030-04-26 10:16:00-04:00',
                '2030-04-26 10:31:00-04:00',
                '2030-04-26 10:46:00-04:00',
                '2030-04-26 11:01:00-04:00',
                '2030-04-29 10:01:00-04:00',
                '2030-04-29 10:16:00-04:00',
                '2030-04-29 10:31:00-04:00',
                '2030-04-29 10:46:00-04:00',
                '2030-04-29 11:01:00-04:00',
            ],
            id="Every 15 minutes Monday - Friday from 10:01am to 11:02pm (inclusive)",
        ),
        pytest.param(
            'DTSTART:20301219T130551Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYDAY=SA;BYMONTHDAY=12,13,14,15,16,17,18',
            [
                '2031-01-18 13:05:51+00:00',
                '2031-02-15 13:05:51+00:00',
                '2031-03-15 13:05:51+00:00',
                '2031-04-12 13:05:51+00:00',
                '2031-05-17 13:05:51+00:00',
                '2031-06-14 13:05:51+00:00',
                '2031-07-12 13:05:51+00:00',
                '2031-08-16 13:05:51+00:00',
                '2031-09-13 13:05:51+00:00',
                '2031-10-18 13:05:51+00:00',
            ],
            id="Any Saturday whose month day is between 12 and 18",
        ),
    ),
)
def test_complex_schedule(post, admin_user, rrule, expected_result):
    # Every day except Sunday, 2022-05-01 is a Sunday

    url = reverse('api:schedule_rrule')
    r = post(url, {'rrule': rrule}, admin_user, expect=200)

    assert list(map(str, r.data['local'])) == expected_result


@pytest.mark.django_db
def test_zoneinfo(get, admin_user):
    url = reverse('api:schedule_zoneinfo')
awx/main/tests/functional/commands/test_callback_receiver.py (new file, 26 lines)
@@ -0,0 +1,26 @@
import pytest

from awx.main.dispatch.worker.callback import job_stats_wrapup
from awx.main.models.jobs import Job


@pytest.mark.django_db
def test_wrapup_does_not_send_notifications(mocker):
    job = Job.objects.create(status='running')
    assert job.host_status_counts is None
    mock = mocker.patch('awx.main.models.notifications.JobNotificationMixin.send_notification_templates')
    job_stats_wrapup(job.id)
    job.refresh_from_db()
    assert job.host_status_counts == {}
    mock.assert_not_called()


@pytest.mark.django_db
def test_wrapup_does_send_notifications(mocker):
    job = Job.objects.create(status='successful')
    assert job.host_status_counts is None
    mock = mocker.patch('awx.main.models.notifications.JobNotificationMixin.send_notification_templates')
    job_stats_wrapup(job.id)
    job.refresh_from_db()
    assert job.host_status_counts == {}
    mock.assert_called_once_with('succeeded')
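These tests exercise the same gating rule that BaseTask applies after the run: notify only when stats processing finished (host_status_counts set) or when the wrapup event never reached the dispatcher. A minimal restatement of that condition (simplified names):

    # Mirrors `(host_status_counts is not None) or (not wrapup_event_dispatched)`
    def should_notify_now(host_status_counts, wrapup_event_dispatched):
        return (host_status_counts is not None) or (not wrapup_event_dispatched)

    assert should_notify_now({}, True) is True      # stats processed: notify here
    assert should_notify_now(None, False) is True   # wrapup never dispatched: notify here
    assert should_notify_now(None, True) is False   # callback receiver will notify instead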
@@ -52,10 +52,12 @@ class TestKeyRegeneration:
        settings.cache.delete('REDHAT_PASSWORD')

        # verify that the old SECRET_KEY doesn't work
        settings._awx_conf_memoizedcache.clear()
        with pytest.raises(InvalidToken):
            settings.REDHAT_PASSWORD

        # verify that the new SECRET_KEY *does* work
        settings._awx_conf_memoizedcache.clear()
        with override_settings(SECRET_KEY=new_key):
            assert settings.REDHAT_PASSWORD == 'sensitive'
@@ -251,18 +251,17 @@ def test_utc_until(job_template, until, dtend):

@pytest.mark.django_db
@pytest.mark.parametrize(
    'dtstart, until',
    'rrule, length',
    [
        ['DTSTART:20380601T120000Z', '20380601T170000'],  # noon UTC to 5PM UTC
        ['DTSTART;TZID=America/New_York:20380601T120000', '20380601T170000'],  # noon EST to 5PM EST
        ['DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000', 6],  # noon UTC to 5PM UTC (noon, 1pm, 2, 3, 4, 5pm)
        ['DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000', 6],  # noon EST to 5PM EST
    ],
)
def test_tzinfo_naive_until(job_template, dtstart, until):
    rrule = '{} RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL={}'.format(dtstart, until)  # noqa
def test_tzinfo_naive_until(job_template, rrule, length):
    s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
    s.save()
    gen = Schedule.rrulestr(s.rrule).xafter(now(), count=20)
    assert len(list(gen)) == 6  # noon, 1PM, 2, 3, 4, 5PM
    assert len(list(gen)) == length


@pytest.mark.django_db
@@ -309,6 +308,12 @@ def test_beginning_of_time(job_template):
    [
        ['DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', 'UTC'],
        ['DTSTART;TZID=US/Eastern:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1', 'US/Eastern'],
        ['DTSTART;TZID=US/Eastern:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1 EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU', 'US/Eastern'],
        # Technically the serializer should never let us get 2 DTSTARTs in a rule, but it is still valid and the rrule will prefer the last DTSTART
        [
            'DTSTART;TZID=US/Eastern:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1 EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU DTSTART;TZID=US/Pacific:20300112T210000',
            'US/Pacific',
        ],
    ],
)
def test_timezone_property(job_template, rrule, tz):
@@ -389,3 +394,163 @@ def test_duplicate_name_within_template(job_template):
        s2.save()

    assert str(ierror.value) == "UNIQUE constraint failed: main_schedule.unified_job_template_id, main_schedule.name"


# Test until with multiple entries (should only return the first)
# NOTE: this test may change once we determine how the UI will start to handle this field
@pytest.mark.django_db
@pytest.mark.parametrize(
    'rrule, expected_until',
    [
        pytest.param('DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1', '', id="No until"),
        pytest.param('DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z', '2038-06-01T17:00:00', id="One until in UTC"),
        pytest.param(
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000',
            '2038-06-01T17:00:00',
            id="One until in local TZ",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000 RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T170000',
            '2038-06-01T22:00:00',
            id="Multiple untils (return only the first one)",
        ),
    ],
)
def test_until_with_complex_schedules(job_template, rrule, expected_until):
    sched = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
    assert sched.until == expected_until


# Test coerce_naive_until; this method takes a naive until field and forces it into UTC
@pytest.mark.django_db
@pytest.mark.parametrize(
    'rrule, expected_result',
    [
        pytest.param(
            'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1',
            'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1',
            id="No untils present",
        ),
        pytest.param(
            'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z',
            'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z',
            id="One until already in UTC",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000',
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000Z',
            id="One until with local tz",
        ),
        pytest.param(
            'DTSTART:20380601T120000Z RRULE:FREQ=MINUTLEY;INTERVAL=1;UNTIL=20380601T170000Z EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z',
            'DTSTART:20380601T120000Z RRULE:FREQ=MINUTLEY;INTERVAL=1;UNTIL=20380601T170000Z EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z',
            id="Multiple untils all in UTC",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T170000 EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000',
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T220000Z EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000Z',
            id="Multiple untils with local tz",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T170000Z EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000',
            'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20380601T170000Z EXRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000Z',
            id="Multiple untils mixed",
        ),
    ],
)
def test_coerce_naive_until(rrule, expected_result):
    new_rrule = Schedule.coerce_naive_until(rrule)
    assert new_rrule == expected_result
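The coercion these cases pin down is: interpret a naive UNTIL value in the DTSTART timezone, then rewrite it in UTC with the trailing 'Z'. A standalone sketch of the idea — not Schedule.coerce_naive_until itself, and using a January date to sidestep DST, which is why the real tests pin exact expected strings:

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    def naive_until_to_utc(until, tzname):
        # Interpret the naive UNTIL in the DTSTART timezone, then emit UTC + 'Z'.
        local = datetime.strptime(until, '%Y%m%dT%H%M%S').replace(tzinfo=ZoneInfo(tzname))
        return local.astimezone(timezone.utc).strftime('%Y%m%dT%H%M%SZ')

    # 12:00 in New York in January (EST, UTC-5) is 17:00 UTC.
    assert naive_until_to_utc('20380115T120000', 'America/New_York') == '20380115T170000Z'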
# Test skipping days with exclusion
@pytest.mark.django_db
def test_skip_sundays():
    rrule = '''
    DTSTART;TZID=America/New_York:20220310T150000
    RRULE:INTERVAL=1;FREQ=DAILY
    EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU
    '''
    timezone = pytz.timezone("America/New_York")
    friday_apr_29th = datetime(2022, 4, 29, 0, 0, 0, 0, timezone)
    monday_may_2nd = datetime(2022, 5, 2, 23, 59, 59, 999, timezone)
    ruleset = Schedule.rrulestr(rrule)
    gen = ruleset.between(friday_apr_29th, monday_may_2nd, True)
    # We should only get Fri, Sat and Mon (skipping Sunday)
    assert len(list(gen)) == 3
    saturday_night = datetime(2022, 4, 30, 23, 59, 59, 9999, timezone)
    monday_morning = datetime(2022, 5, 2, 0, 0, 0, 0, timezone)
    gen = ruleset.between(saturday_night, monday_morning, True)
    assert len(list(gen)) == 0
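The same daily-except-Sunday shape can be reproduced with plain dateutil, which is what Schedule.rrulestr builds on. A small sketch under that assumption (EXRULE is deprecated in newer dateutil releases but still honored):

    from datetime import datetime, timezone
    from dateutil.rrule import rrulestr

    ruleset = rrulestr(
        'RRULE:FREQ=DAILY;INTERVAL=1\nEXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU',
        dtstart=datetime(2022, 4, 29, 15, 0, tzinfo=timezone.utc),
        forceset=True,
    )
    window = ruleset.between(
        datetime(2022, 4, 29, 0, 0, tzinfo=timezone.utc),
        datetime(2022, 5, 2, 23, 59, tzinfo=timezone.utc),
        inc=True,
    )
    # Sunday (May 1st) is excluded by the EXRULE
    assert [d.strftime('%a') for d in window] == ['Fri', 'Sat', 'Mon']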
# Test the get_end_date function
@pytest.mark.django_db
@pytest.mark.parametrize(
    'rrule, expected_result',
    [
        pytest.param(
            'DTSTART;TZID=America/New_York:20210310T150000 RRULE:INTERVAL=1;FREQ=DAILY;UNTIL=20210430T150000Z EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU;COUNT=5',
            datetime(2021, 4, 29, 19, 0, 0, tzinfo=pytz.utc),
            id="Single rule in rule set with UTC TZ aware until",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;UNTIL=20220430T150000 EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU;COUNT=5',
            datetime(2022, 4, 30, 19, 0, tzinfo=pytz.utc),
            id="Single rule in ruleset with naive until",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;COUNT=4 EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU;COUNT=5',
            datetime(2022, 3, 12, 20, 0, tzinfo=pytz.utc),
            id="Single rule in ruleset with count",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU;COUNT=5',
            None,
            id="Single rule in ruleset with no end",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY',
            None,
            id="Single rule in rule with no end",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;UNTIL=20220430T150000Z',
            datetime(2022, 4, 29, 19, 0, tzinfo=pytz.utc),
            id="Single rule in rule with UTC TZ aware until",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;UNTIL=20220430T150000',
            datetime(2022, 4, 30, 19, 0, tzinfo=pytz.utc),
            id="Single rule in rule with naive until",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=SU RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=MO',
            None,
            id="Multi rule with no end",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=SU RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=MO;COUNT=4',
            None,
            id="Multi rule, one with no end and one with a count",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220310T150000 RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=SU;UNTIL=20220430T1500Z RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=MO;COUNT=4',
            datetime(2022, 4, 24, 19, 0, tzinfo=pytz.utc),
            id="Multi rule, one with until and one with a count",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20010430T1500 RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=SU;COUNT=1',
            datetime(2001, 5, 6, 19, 0, tzinfo=pytz.utc),
            id="Rule with count but ends in the past",
        ),
        pytest.param(
            'DTSTART;TZID=America/New_York:20220430T1500 RRULE:INTERVAL=1;FREQ=DAILY;BYDAY=SU;UNTIL=20010430T1500',
            None,
            id="Rule with until that ends in the past",
        ),
    ],
)
def test_get_end_date(rrule, expected_result):
    ruleset = Schedule.rrulestr(rrule)
    assert expected_result == Schedule.get_end_date(ruleset)
@@ -324,6 +324,22 @@ def test_single_job_dependencies_inventory_update_launch(controlplane_instance_g
    TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, [], instance)


@pytest.mark.django_db
def test_inventory_update_launches_project_update(controlplane_instance_group, scm_inventory_source):
    ii = scm_inventory_source
    project = scm_inventory_source.source_project
    project.scm_update_on_launch = True
    project.save()
    iu = ii.create_inventory_update()
    iu.status = "pending"
    iu.save()
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        tm = TaskManager()
        with mock.patch.object(TaskManager, "create_project_update", wraps=tm.create_project_update) as mock_pu:
            tm.schedule()
            mock_pu.assert_called_with(iu, project_id=project.id)


@pytest.mark.django_db
def test_job_dependency_with_already_updated(controlplane_instance_group, job_template_factory, mocker, inventory_source_factory):
    objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start"])
@@ -382,7 +398,7 @@ def test_shared_dependencies_launch(controlplane_instance_group, job_template_fa
    pu = p.project_updates.first()
    iu = ii.inventory_updates.first()
    TaskManager.start_task.assert_has_calls(
        [mock.call(iu, controlplane_instance_group, [j1, j2, pu], instance), mock.call(pu, controlplane_instance_group, [j1, j2, iu], instance)]
        [mock.call(iu, controlplane_instance_group, [j1, j2], instance), mock.call(pu, controlplane_instance_group, [j1, j2], instance)]
    )
    pu.status = "successful"
    pu.finished = pu.created + timedelta(seconds=1)
@@ -464,7 +480,6 @@ def test_generate_dependencies_only_once(job_template_factory):
    job.status = "pending"
    job.name = "job_gen_dep"
    job.save()

    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        # job starts with dependencies_processed as False
        assert not job.dependencies_processed
@@ -478,10 +493,6 @@ def test_generate_dependencies_only_once(job_template_factory):
    # Run ._schedule() again, but make sure .generate_dependencies() is not
    # called with job in the argument list
    tm = TaskManager()
    tm.generate_dependencies = mock.MagicMock()
    tm.generate_dependencies = mock.MagicMock(return_value=[])
    tm._schedule()

    # .call_args is tuple, (positional_args, kwargs), [0][0] then is
    # the first positional arg, i.e. the first argument of
    # .generate_dependencies()
    assert tm.generate_dependencies.call_args[0][0] == []
    tm.generate_dependencies.assert_has_calls([mock.call([]), mock.call([])])
@@ -10,6 +10,8 @@ from awx.main.models.notifications import NotificationTemplate, Notification
from awx.main.models.inventory import Inventory, InventorySource
from awx.main.models.jobs import JobTemplate

from django.test.utils import override_settings


@pytest.mark.django_db
def test_get_notification_template_list(get, user, notification_template):
@@ -163,7 +165,7 @@ def test_custom_environment_injection(post, user, organization):
    )
    assert response.status_code == 201
    template = NotificationTemplate.objects.get(pk=response.data['id'])
    with pytest.raises(ConnectionError), mock.patch('django.conf.settings.AWX_TASK_ENV', {'HTTPS_PROXY': '192.168.50.100:1234'}), mock.patch.object(
    with pytest.raises(ConnectionError), override_settings(AWX_TASK_ENV={'HTTPS_PROXY': '192.168.50.100:1234'}), mock.patch.object(
        HTTPAdapter, 'send'
    ) as fake_send:
@@ -1,16 +1,10 @@
# Python
from collections import namedtuple
import pytest
from unittest import mock
import json

# AWX
from awx.api.serializers import (
    JobDetailSerializer,
    JobSerializer,
    JobOptionsSerializer,
    ProjectUpdateDetailSerializer,
)
from awx.api.serializers import JobSerializer, JobOptionsSerializer

from awx.main.models import (
    Label,
@@ -108,7 +102,7 @@ class TestJobOptionsSerializerGetSummaryFields:


class TestJobDetailSerializerGetHostStatusCountFields(object):
    def test_hosts_are_counted_once(self, job, mocker):
    def test_hosts_are_counted_once(self):
        mock_event = JobEvent(
            **{
                'event': 'playbook_on_stats',
@@ -133,26 +127,11 @@ class TestJobDetailSerializerGetHostStatusCountFields(object):
            }
        )

        mock_qs = namedtuple('mock_qs', ['get'])(mocker.MagicMock(return_value=mock_event))
        only = mocker.MagicMock(return_value=mock_qs)
        job.get_event_queryset = lambda *args, **kwargs: mocker.MagicMock(only=only)

        serializer = JobDetailSerializer()
        host_status_counts = serializer.get_host_status_counts(job)

        assert host_status_counts == {'ok': 1, 'changed': 1, 'dark': 2}

    def test_host_status_counts_is_empty_dict_without_stats_event(self, job):
        job.get_event_queryset = lambda *args, **kwargs: JobEvent.objects.none()

        serializer = JobDetailSerializer()
        host_status_counts = serializer.get_host_status_counts(job)

        assert host_status_counts == {}
        assert mock_event.get_host_status_counts() == {'ok': 1, 'changed': 1, 'dark': 2}


class TestProjectUpdateDetailSerializerGetHostStatusCountFields(object):
    def test_hosts_are_counted_once(self, project_update, mocker):
    def test_hosts_are_counted_once(self):
        mock_event = ProjectUpdateEvent(
            **{
                'event': 'playbook_on_stats',
@@ -177,18 +156,4 @@ class TestProjectUpdateDetailSerializerGetHostStatusCountFields(object):
            }
        )

        mock_qs = namedtuple('mock_qs', ['get'])(mocker.MagicMock(return_value=mock_event))
        project_update.project_update_events.only = mocker.MagicMock(return_value=mock_qs)

        serializer = ProjectUpdateDetailSerializer()
        host_status_counts = serializer.get_host_status_counts(project_update)

        assert host_status_counts == {'ok': 1, 'changed': 1, 'dark': 2}

    def test_host_status_counts_is_empty_dict_without_stats_event(self, project_update):
        project_update.project_update_events = ProjectUpdateEvent.objects.none()

        serializer = ProjectUpdateDetailSerializer()
        host_status_counts = serializer.get_host_status_counts(project_update)

        assert host_status_counts == {}
        assert mock_event.get_host_status_counts() == {'ok': 1, 'changed': 1, 'dark': 2}
awx/main/tests/unit/tasks/test_runner_callback.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from awx.main.tasks.callback import RunnerCallback
from awx.main.constants import ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE

from django.utils.translation import ugettext_lazy as _


def test_delay_update(mock_me):
    rc = RunnerCallback()
    rc.delay_update(foo='bar')
    assert rc.extra_update_fields == {'foo': 'bar'}
    rc.delay_update(foo='foobar')
    assert rc.extra_update_fields == {'foo': 'foobar'}
    rc.delay_update(bar='foo')
    assert rc.get_delayed_update_fields() == {'foo': 'foobar', 'bar': 'foo', 'emitted_events': 0}


def test_delay_update_skip_if_set(mock_me):
    rc = RunnerCallback()
    rc.delay_update(foo='bar', skip_if_already_set=True)
    assert rc.extra_update_fields == {'foo': 'bar'}
    rc.delay_update(foo='foobar', skip_if_already_set=True)
    assert rc.extra_update_fields == {'foo': 'bar'}


def test_delay_update_failure_fields(mock_me):
    rc = RunnerCallback()
    rc.delay_update(job_explanation='1')
    rc.delay_update(job_explanation=_('2'))
    assert rc.extra_update_fields == {'job_explanation': '1\n2'}
    rc.delay_update(result_traceback='1')
    rc.delay_update(result_traceback=_('2'))
    rc.delay_update(result_traceback=_('3'), skip_if_already_set=True)
    assert rc.extra_update_fields == {'job_explanation': '1\n2', 'result_traceback': '1\n2'}


def test_duplicate_updates(mock_me):
    rc = RunnerCallback()
    rc.delay_update(job_explanation='really long summary...')
    rc.delay_update(job_explanation='really long summary...')
    rc.delay_update(job_explanation='really long summary...')
    assert rc.extra_update_fields == {'job_explanation': 'really long summary...'}


def test_special_ansible_runner_message(mock_me):
    rc = RunnerCallback()
    rc.delay_update(result_traceback='Traceback:\ngot an unexpected keyword argument\nFile: foo.py')
    rc.delay_update(result_traceback='Traceback:\ngot an unexpected keyword argument\nFile: bar.py')
    assert rc.get_delayed_update_fields().get('result_traceback') == (
        'Traceback:\ngot an unexpected keyword argument\nFile: foo.py\n'
        'Traceback:\ngot an unexpected keyword argument\nFile: bar.py\n'
        f'{ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE}'
    )
@@ -1919,26 +1919,6 @@ def test_managed_injector_redaction(injector_cls):
    assert 'very_secret_value' not in str(build_safe_env(env))


@mock.patch('logging.getLogger')
def test_notification_job_not_finished(logging_getLogger, mocker):
    uj = mocker.MagicMock()
    uj.finished = False
    logger = mocker.Mock()
    logging_getLogger.return_value = logger

    with mocker.patch('awx.main.models.UnifiedJob.objects.get', uj):
        system.handle_success_and_failure_notifications(1)
        assert logger.warning.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.")


def test_notification_job_finished(mocker):
    uj = mocker.MagicMock(send_notification_templates=mocker.MagicMock(), finished=True)

    with mocker.patch('awx.main.models.UnifiedJob.objects.get', mocker.MagicMock(return_value=uj)):
        system.handle_success_and_failure_notifications(1)
        uj.send_notification_templates.assert_called()


def test_job_run_no_ee(mock_me):
    org = Organization(pk=1)
    proj = Project(pk=1, organization=org)
@@ -7,14 +7,17 @@ import time
 logger = logging.getLogger('awx.main.tasks.utils')
 
 
-def update_model(model, pk, _attempt=0, _max_attempts=5, **updates):
+def update_model(model, pk, _attempt=0, _max_attempts=5, select_for_update=False, **updates):
     """Reload the model instance from the database and update the
     given fields.
     """
     try:
         with transaction.atomic():
             # Retrieve the model instance.
-            instance = model.objects.get(pk=pk)
+            if select_for_update:
+                instance = model.objects.select_for_update().get(pk=pk)
+            else:
+                instance = model.objects.get(pk=pk)
 
             # Update the appropriate fields and save the model
             # instance, then return the new instance.
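The new select_for_update flag makes the reload happen under SELECT ... FOR UPDATE, so a competing writer to the same row blocks until the atomic block inside update_model commits. A hedged usage sketch, where Job and job_id are illustrative (any Django model and primary key would do):

from awx.main.tasks.utils import update_model
from awx.main.models import Job  # illustrative model choice


def mark_successful(job_id):
    # Plain reload: a racing writer can interleave between get() and save().
    update_model(Job, job_id, status='running')
    # Row-locked reload: serializes competing updates to the same row for the
    # duration of the transaction inside update_model.
    return update_model(Job, job_id, select_for_update=True, status='successful')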
@@ -26,7 +26,9 @@
   tasks:
 
     - name: delete project directory before update
-      command: "rm -rf {{project_path}}/*"  # volume mounted, cannot delete folder itself
+      command: "find -delete"  # volume mounted, cannot delete folder itself
+      args:
+        chdir: "{{ project_path }}"
       tags:
         - delete
@@ -955,6 +955,7 @@ RECEPTOR_RELEASE_WORK = True
 
 MIDDLEWARE = [
     'django_guid.middleware.guid_middleware',
+    'awx.main.middleware.SettingsCacheMiddleware',
     'awx.main.middleware.TimingMiddleware',
     'django.contrib.sessions.middleware.SessionMiddleware',
     'awx.main.middleware.MigrationRanCheckMiddleware',
@@ -997,9 +998,6 @@ BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS = 10
 # How often websocket process will generate stats
 BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS = 5
 
-# Number of times to retry sending a notification when waiting on a job to finish.
-AWX_NOTIFICATION_JOB_FINISH_MAX_RETRY = 5
-
 DJANGO_GUID = {'GUID_HEADER_NAME': 'X-API-Request-Id'}
 
 # Name of the default task queue
381  awx/ui/package-lock.json  generated
@@ -6,32 +6,32 @@
     "": {
       "name": "ui",
       "dependencies": {
-        "@lingui/react": "3.9.0",
-        "@patternfly/patternfly": "4.183.1",
-        "@patternfly/react-core": "4.198.19",
+        "@lingui/react": "3.13.3",
+        "@patternfly/patternfly": "4.194.4",
+        "@patternfly/react-core": "^4.201.0",
         "@patternfly/react-icons": "4.49.19",
-        "@patternfly/react-table": "4.67.19",
-        "ace-builds": "^1.4.12",
+        "@patternfly/react-table": "4.83.1",
+        "ace-builds": "^1.5.1",
         "ansi-to-html": "0.7.2",
         "axios": "0.22.0",
-        "codemirror": "^5.47.0",
-        "d3": "7.1.1",
+        "codemirror": "^5.65.4",
+        "d3": "7.4.4",
         "dagre": "^0.8.4",
         "formik": "2.2.9",
         "has-ansi": "5.0.1",
         "html-entities": "2.3.2",
         "js-yaml": "^3.13.1",
-        "luxon": "^2.0.1",
+        "luxon": "^2.4.0",
         "prop-types": "^15.6.2",
         "react": "17.0.2",
         "react-ace": "^9.3.0",
         "react-dom": "17.0.2",
-        "react-error-boundary": "^3.1.3",
+        "react-error-boundary": "^3.1.4",
         "react-router-dom": "^5.1.2",
         "react-virtualized": "^9.21.1",
         "rrule": "2.6.4",
         "sanitize-html": "2.4.0",
-        "styled-components": "5.3.0"
+        "styled-components": "5.3.5"
       },
       "devDependencies": {
         "@babel/core": "^7.16.10",
@@ -46,7 +46,7 @@
         "@lingui/macro": "^3.7.1",
         "@nteract/mockument": "^1.0.4",
         "@testing-library/jest-dom": "^5.16.2",
-        "@testing-library/react": "^12.1.4",
+        "@testing-library/react": "^12.1.5",
         "@wojtekmaj/enzyme-adapter-react-17": "0.6.5",
         "babel-plugin-macros": "3.1.0",
         "enzyme": "^3.10.0",
@@ -56,14 +56,14 @@
         "eslint-config-airbnb": "19.0.4",
         "eslint-config-prettier": "8.3.0",
         "eslint-import-resolver-webpack": "0.13.2",
-        "eslint-plugin-i18next": "5.1.2",
+        "eslint-plugin-i18next": "5.2.1",
         "eslint-plugin-import": "2.25.4",
         "eslint-plugin-jsx-a11y": "6.5.1",
         "eslint-plugin-react": "7.28.0",
         "eslint-plugin-react-hooks": "4.3.0",
         "http-proxy-middleware": "^1.0.3",
         "jest-websocket-mock": "^2.0.2",
-        "mock-socket": "^9.0.3",
+        "mock-socket": "^9.1.3",
         "prettier": "2.3.2",
         "react-scripts": "5.0.0"
       },
@@ -1956,17 +1956,17 @@
       }
     },
     "node_modules/@emotion/is-prop-valid": {
-      "version": "0.8.8",
-      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz",
-      "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==",
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.1.2.tgz",
+      "integrity": "sha512-3QnhqeL+WW88YjYbQL5gUIkthuMw7a0NGbZ7wfFVk2kg/CK5w8w5FFa0RzWjyY1+sujN0NWbtSHH6OJmWHtJpQ==",
       "dependencies": {
-        "@emotion/memoize": "0.7.4"
+        "@emotion/memoize": "^0.7.4"
       }
     },
     "node_modules/@emotion/memoize": {
-      "version": "0.7.4",
-      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz",
-      "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw=="
+      "version": "0.7.5",
+      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.5.tgz",
+      "integrity": "sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ=="
     },
     "node_modules/@emotion/stylis": {
       "version": "0.8.5",
@@ -3546,9 +3546,9 @@
       }
     },
     "node_modules/@lingui/core": {
-      "version": "3.9.0",
-      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.9.0.tgz",
-      "integrity": "sha512-tbOBqou4fPzHB5aivir1fuikFoMYFcvpWk68Pysg34KN0xagPtyj0Dnz3clmq6Vnuf4SaK4LJdXwNUPWXpah7A==",
+      "version": "3.13.3",
+      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.13.3.tgz",
+      "integrity": "sha512-3rQDIC7PtPfUuZCSNfU0nziWNMlGk3JhpxENzGrlt1M8w5RHson89Mk1Ce/how+hWzFpumCQDWLDDhyRPpydbg==",
       "dependencies": {
         "@babel/runtime": "^7.11.2",
         "make-plural": "^6.2.2",
@@ -3589,15 +3589,18 @@
       }
     },
     "node_modules/@lingui/react": {
-      "version": "3.9.0",
-      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.9.0.tgz",
-      "integrity": "sha512-VG+sQyttrIouhBq0h6aTtzxKO3kKWRjJPiBnaFs3gVNcmodIpzdTA9YqbFusu3+Q/+w6LYP9HuSv5eMM5jQ5Cw==",
+      "version": "3.13.3",
+      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.13.3.tgz",
+      "integrity": "sha512-sCCI5xMcUY9b6w2lwbwy6iHpo1Fb9TDcjcHAD2KI5JueLH+WWQG66tIHiVAlSsQ+hmQ9Tt+f86H05JQEiDdIvg==",
       "dependencies": {
         "@babel/runtime": "^7.11.2",
-        "@lingui/core": "^3.9.0"
+        "@lingui/core": "^3.13.3"
       },
       "engines": {
         "node": ">=10.0.0"
       },
+      "peerDependencies": {
+        "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
+      }
     },
     "node_modules/@nodelib/fs.scandir": {
@@ -3642,18 +3645,18 @@
       "dev": true
     },
     "node_modules/@patternfly/patternfly": {
-      "version": "4.183.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.183.1.tgz",
-      "integrity": "sha512-XJZIG/kcEbIPI/0Q6+Q5ax2m295IpQCppertUQ4RfOSkvJVfjQ4CUNmR/ycgjlGm1DItmYJe/NqVFerNlvzUeg=="
+      "version": "4.194.4",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.194.4.tgz",
+      "integrity": "sha512-SJxr502v0xXk1N5OiPLunD9pdKvHp5XXJLXcD5lIPrimjjUcy46m48X8YONjDvnC/Y5xV92UI2KxoCVucE34eA=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.198.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.198.19.tgz",
-      "integrity": "sha512-f46CIKwWCJ1UNL50TXnvarYUhr2KtxNFw/kGYtG6QwrQwKXscZiXMMtW//0Q08cyhLB0vfxHOLbCKxVaVJ3R3w==",
+      "version": "4.214.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.214.1.tgz",
+      "integrity": "sha512-XHEqXpnBEDyLVdAEDOYlGqFHnN43eNLSD5HABB99xO6541JV9MRnbxs0+v9iYnfhcKh/8bhA9ITXnUi3f2PEvg==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.49.19",
-        "@patternfly/react-styles": "^4.48.19",
-        "@patternfly/react-tokens": "^4.50.19",
+        "@patternfly/react-icons": "^4.65.1",
+        "@patternfly/react-styles": "^4.64.1",
+        "@patternfly/react-tokens": "^4.66.1",
         "focus-trap": "6.2.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
@@ -3664,6 +3667,15 @@
         "react-dom": "^16.8.0 || ^17.0.0"
       }
     },
+    "node_modules/@patternfly/react-core/node_modules/@patternfly/react-icons": {
+      "version": "4.65.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.65.1.tgz",
+      "integrity": "sha512-CUYFRPztFkR7qrXq/0UAhLjeHd8FdjLe4jBjj8tfKc7OXwxDeZczqNFyRMATZpPaduTH7BU2r3OUjQrgAbquWg==",
+      "peerDependencies": {
+        "react": "^16.8.0 || ^17.0.0",
+        "react-dom": "^16.8.0 || ^17.0.0"
+      }
+    },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
       "version": "2.3.1",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -3679,19 +3691,19 @@
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.48.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.48.19.tgz",
-      "integrity": "sha512-8+t8wqYGWkmyhxLty/kQXCY44rnW0y60nUMG7QKNzF1bAFJIpR8jKuVnHArM1h+MI9D53e8OVjKORH83hUAzJw=="
+      "version": "4.64.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.64.1.tgz",
+      "integrity": "sha512-+GxULkP2o5Vpr9w+J4NiGOGzhTfNniYzdPGEF/yC+oDoAXB6Q1HJyQnEj+kJH31xNvwmw3G3VFtwRLX4ZWr0oA=="
     },
     "node_modules/@patternfly/react-table": {
-      "version": "4.67.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.67.19.tgz",
-      "integrity": "sha512-pAa0tpafLHtICCiM3TDQ89xqQTvkZtRuwJ6+KKSpN1UdEEHy+3j0JjDUcslN+6Lo7stgoLwgWzGmE7bsx4Ys5Q==",
+      "version": "4.83.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.83.1.tgz",
+      "integrity": "sha512-mkq13x9funh+Nh2Uzj2ZQBOacNYc+a60yUAHZMXgNcljCJ3LTQUoYy6EonvYrqwSrpC7vj8nLt8+/XbDNc0Aig==",
       "dependencies": {
-        "@patternfly/react-core": "^4.198.19",
-        "@patternfly/react-icons": "^4.49.19",
-        "@patternfly/react-styles": "^4.48.19",
-        "@patternfly/react-tokens": "^4.50.19",
+        "@patternfly/react-core": "^4.214.1",
+        "@patternfly/react-icons": "^4.65.1",
+        "@patternfly/react-styles": "^4.64.1",
+        "@patternfly/react-tokens": "^4.66.1",
         "lodash": "^4.17.19",
         "tslib": "^2.0.0"
       },
@@ -3700,15 +3712,24 @@
         "react-dom": "^16.8.0 || ^17.0.0"
       }
     },
+    "node_modules/@patternfly/react-table/node_modules/@patternfly/react-icons": {
+      "version": "4.65.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.65.1.tgz",
+      "integrity": "sha512-CUYFRPztFkR7qrXq/0UAhLjeHd8FdjLe4jBjj8tfKc7OXwxDeZczqNFyRMATZpPaduTH7BU2r3OUjQrgAbquWg==",
+      "peerDependencies": {
+        "react": "^16.8.0 || ^17.0.0",
+        "react-dom": "^16.8.0 || ^17.0.0"
+      }
+    },
     "node_modules/@patternfly/react-table/node_modules/tslib": {
-      "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
-      "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
+      "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
+      "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.50.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.50.19.tgz",
-      "integrity": "sha512-wbUPb8welJ8p+OjXrc0X3UYDj5JjN9xnfpYkZdAySpcFtk0BAn5Py6UEZCjKtw7XHHfCQ1zwKXpXDShcu/5KVQ=="
+      "version": "4.66.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.66.1.tgz",
+      "integrity": "sha512-k0IWqpufM6ezT+3gWlEamqQ7LW9yi8e8cBBlude5IU8eIEqIG6AccwR1WNBEK1wCVWGwVxakLMdf0XBLl4k52Q=="
     },
     "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -4375,21 +4396,21 @@
       }
     },
     "node_modules/@testing-library/react": {
-      "version": "12.1.4",
-      "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.4.tgz",
-      "integrity": "sha512-jiPKOm7vyUw311Hn/HlNQ9P8/lHNtArAx0PisXyFixDDvfl8DbD6EUdbshK5eqauvBSvzZd19itqQ9j3nferJA==",
+      "version": "12.1.5",
+      "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.5.tgz",
+      "integrity": "sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==",
       "dev": true,
       "dependencies": {
         "@babel/runtime": "^7.12.5",
         "@testing-library/dom": "^8.0.0",
-        "@types/react-dom": "*"
+        "@types/react-dom": "<18.0.0"
       },
       "engines": {
         "node": ">=12"
       },
       "peerDependencies": {
-        "react": "*",
-        "react-dom": "*"
+        "react": "<18.0.0",
+        "react-dom": "<18.0.0"
       }
     },
     "node_modules/@tootallnate/once": {
@@ -5145,9 +5166,9 @@
       }
     },
     "node_modules/ace-builds": {
-      "version": "1.4.12",
-      "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.4.12.tgz",
-      "integrity": "sha512-G+chJctFPiiLGvs3+/Mly3apXTcfgE45dT5yp12BcWZ1kUs+gm0qd3/fv4gsz6fVag4mM0moHVpjHDIgph6Psg=="
+      "version": "1.5.1",
+      "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.5.1.tgz",
+      "integrity": "sha512-2G313uyM7lfqZgCs6xCW4QPeuX2GZKaCyRqKhTC2mBeZqC7TjkTXguKRyLzsAIMLJfj3koq98RXCBoemoZVAnQ=="
     },
     "node_modules/acorn": {
       "version": "7.4.1",
@@ -6581,9 +6602,9 @@
       }
     },
     "node_modules/codemirror": {
-      "version": "5.61.0",
-      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.61.0.tgz",
-      "integrity": "sha512-D3wYH90tYY1BsKlUe0oNj2JAhQ9TepkD51auk3N7q+4uz7A/cgJ5JsWHreT0PqieW1QhOuqxQ2reCXV1YXzecg=="
+      "version": "5.65.4",
+      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.4.tgz",
+      "integrity": "sha512-tytrSm5Rh52b6j36cbDXN+FHwHCl9aroY4BrDZB2NFFL3Wjfq9nuYVLFFhaOYOczKAg3JXTr8BuT8LcE5QY4Iw=="
     },
     "node_modules/collect-v8-coverage": {
       "version": "1.0.1",
@@ -7317,9 +7338,9 @@
       "integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
     },
     "node_modules/d3": {
-      "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/d3/-/d3-7.1.1.tgz",
-      "integrity": "sha512-8zkLMwSvUAnfN9pcJDfkuxU0Nvg4RLUD0A4BZN1KxJPtlnCGzMx3xM5cRl4m8fym/Vy8rlq52tl90UF3m91OnA==",
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
+      "integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
       "dependencies": {
         "d3-array": "3",
         "d3-axis": "3",
@@ -8867,9 +8888,9 @@
       }
     },
     "node_modules/eslint-plugin-i18next": {
-      "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-i18next/-/eslint-plugin-i18next-5.1.2.tgz",
-      "integrity": "sha512-YuJWaio8BE7eoWE2V3UnddwJhf3XNQ2tb7XAKZhbkeA+BWzm33ujOv6Ezm98Wjc8VCyT9NJvDyvs5/a9AG4QpQ==",
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-i18next/-/eslint-plugin-i18next-5.2.1.tgz",
+      "integrity": "sha512-yXlWOMiyWz9aCGVrLeFijt+LsCXZj9QoddYXmxUeFZrqst4Z2j6vAMBn2iSE2JTNbPDyrdGl3H03UCo+CbdKbQ==",
       "dev": true,
       "dependencies": {
         "requireindex": "~1.1.0"
@@ -15246,11 +15267,11 @@
       }
     },
     "node_modules/luxon": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/luxon/-/luxon-2.0.1.tgz",
-      "integrity": "sha512-8Eawf81c9ZlQj62W3eq4mp+C7SAIAnmaS7ZuEAiX503YMcn+0C1JnMQRtfaQj6B5qTZLgHv0F4H5WabBCvi1fw==",
+      "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/luxon/-/luxon-2.4.0.tgz",
+      "integrity": "sha512-w+NAwWOUL5hO0SgwOHsMBAmZ15SoknmQXhSO0hIbJCAmPKSsGeK8MlmhYh2w6Iib38IxN2M+/ooXWLbeis7GuA==",
       "engines": {
-        "node": "*"
+        "node": ">=12"
       }
     },
     "node_modules/lz-string": {
@@ -15556,13 +15577,10 @@
       }
     },
     "node_modules/mock-socket": {
-      "version": "9.0.3",
-      "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.0.3.tgz",
-      "integrity": "sha512-SxIiD2yE/By79p3cNAAXyLQWTvEFNEzcAO7PH+DzRqKSFaplAPFjiQLmw8ofmpCsZf+Rhfn2/xCJagpdGmYdTw==",
+      "version": "9.1.3",
+      "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.1.3.tgz",
+      "integrity": "sha512-uz8lx8c5wuJYJ21f5UtovqpV0+KJuVwE7cVOLNhrl2QW/CvmstOLRfjXnLSbfFHZtJtiaSGQu0oCJA8SmRcK6A==",
       "dev": true,
-      "dependencies": {
-        "url-parse": "^1.4.4"
-      },
       "engines": {
         "node": ">= 8"
       }
@@ -17727,12 +17745,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/querystringify": {
-      "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
-      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
-      "dev": true
-    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -18214,15 +18226,18 @@
       }
     },
     "node_modules/react-error-boundary": {
-      "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.3.tgz",
-      "integrity": "sha512-A+F9HHy9fvt9t8SNDlonq01prnU8AmkjvGKV4kk8seB9kU3xMEO8J/PQlLVmoOIDODl5U2kufSBs4vrWIqhsAA==",
+      "version": "3.1.4",
+      "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz",
+      "integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==",
       "dependencies": {
         "@babel/runtime": "^7.12.5"
       },
       "engines": {
         "node": ">=10",
         "npm": ">=6"
       },
+      "peerDependencies": {
+        "react": ">=16.13.1"
+      }
     },
     "node_modules/react-error-overlay": {
@@ -20020,13 +20035,14 @@
       }
     },
     "node_modules/styled-components": {
-      "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.0.tgz",
-      "integrity": "sha512-bPJKwZCHjJPf/hwTJl6TbkSZg/3evha+XPEizrZUGb535jLImwDUdjTNxXqjjaASt2M4qO4AVfoHJNe3XB/tpQ==",
+      "version": "5.3.5",
+      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
+      "integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
+      "hasInstallScript": true,
       "dependencies": {
         "@babel/helper-module-imports": "^7.0.0",
         "@babel/traverse": "^7.4.5",
-        "@emotion/is-prop-valid": "^0.8.8",
+        "@emotion/is-prop-valid": "^1.1.0",
         "@emotion/stylis": "^0.8.4",
         "@emotion/unitless": "^0.7.4",
         "babel-plugin-styled-components": ">= 1.12.0",
@@ -20969,16 +20985,6 @@
         "punycode": "^2.1.0"
       }
     },
-    "node_modules/url-parse": {
-      "version": "1.5.9",
-      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz",
-      "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==",
-      "dev": true,
-      "dependencies": {
-        "querystringify": "^2.1.1",
-        "requires-port": "^1.0.0"
-      }
-    },
     "node_modules/util-deprecate": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -23458,17 +23464,17 @@
       }
     },
     "@emotion/is-prop-valid": {
-      "version": "0.8.8",
-      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz",
-      "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==",
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.1.2.tgz",
+      "integrity": "sha512-3QnhqeL+WW88YjYbQL5gUIkthuMw7a0NGbZ7wfFVk2kg/CK5w8w5FFa0RzWjyY1+sujN0NWbtSHH6OJmWHtJpQ==",
      "requires": {
-        "@emotion/memoize": "0.7.4"
+        "@emotion/memoize": "^0.7.4"
      }
    },
    "@emotion/memoize": {
-      "version": "0.7.4",
-      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz",
-      "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw=="
+      "version": "0.7.5",
+      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.5.tgz",
+      "integrity": "sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ=="
    },
    "@emotion/stylis": {
      "version": "0.8.5",
@@ -24705,9 +24711,9 @@
      }
    },
    "@lingui/core": {
-      "version": "3.9.0",
-      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.9.0.tgz",
-      "integrity": "sha512-tbOBqou4fPzHB5aivir1fuikFoMYFcvpWk68Pysg34KN0xagPtyj0Dnz3clmq6Vnuf4SaK4LJdXwNUPWXpah7A==",
+      "version": "3.13.3",
+      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.13.3.tgz",
+      "integrity": "sha512-3rQDIC7PtPfUuZCSNfU0nziWNMlGk3JhpxENzGrlt1M8w5RHson89Mk1Ce/how+hWzFpumCQDWLDDhyRPpydbg==",
      "requires": {
        "@babel/runtime": "^7.11.2",
        "make-plural": "^6.2.2",
@@ -24739,12 +24745,12 @@
      }
    },
    "@lingui/react": {
-      "version": "3.9.0",
-      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.9.0.tgz",
-      "integrity": "sha512-VG+sQyttrIouhBq0h6aTtzxKO3kKWRjJPiBnaFs3gVNcmodIpzdTA9YqbFusu3+Q/+w6LYP9HuSv5eMM5jQ5Cw==",
+      "version": "3.13.3",
+      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.13.3.tgz",
+      "integrity": "sha512-sCCI5xMcUY9b6w2lwbwy6iHpo1Fb9TDcjcHAD2KI5JueLH+WWQG66tIHiVAlSsQ+hmQ9Tt+f86H05JQEiDdIvg==",
      "requires": {
        "@babel/runtime": "^7.11.2",
-        "@lingui/core": "^3.9.0"
+        "@lingui/core": "^3.13.3"
      }
    },
    "@nodelib/fs.scandir": {
@@ -24780,24 +24786,30 @@
      "dev": true
    },
    "@patternfly/patternfly": {
-      "version": "4.183.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.183.1.tgz",
-      "integrity": "sha512-XJZIG/kcEbIPI/0Q6+Q5ax2m295IpQCppertUQ4RfOSkvJVfjQ4CUNmR/ycgjlGm1DItmYJe/NqVFerNlvzUeg=="
+      "version": "4.194.4",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.194.4.tgz",
+      "integrity": "sha512-SJxr502v0xXk1N5OiPLunD9pdKvHp5XXJLXcD5lIPrimjjUcy46m48X8YONjDvnC/Y5xV92UI2KxoCVucE34eA=="
    },
    "@patternfly/react-core": {
-      "version": "4.198.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.198.19.tgz",
-      "integrity": "sha512-f46CIKwWCJ1UNL50TXnvarYUhr2KtxNFw/kGYtG6QwrQwKXscZiXMMtW//0Q08cyhLB0vfxHOLbCKxVaVJ3R3w==",
+      "version": "4.214.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.214.1.tgz",
+      "integrity": "sha512-XHEqXpnBEDyLVdAEDOYlGqFHnN43eNLSD5HABB99xO6541JV9MRnbxs0+v9iYnfhcKh/8bhA9ITXnUi3f2PEvg==",
      "requires": {
-        "@patternfly/react-icons": "^4.49.19",
-        "@patternfly/react-styles": "^4.48.19",
-        "@patternfly/react-tokens": "^4.50.19",
+        "@patternfly/react-icons": "^4.65.1",
+        "@patternfly/react-styles": "^4.64.1",
+        "@patternfly/react-tokens": "^4.66.1",
        "focus-trap": "6.2.2",
        "react-dropzone": "9.0.0",
        "tippy.js": "5.1.2",
        "tslib": "^2.0.0"
      },
      "dependencies": {
+        "@patternfly/react-icons": {
+          "version": "4.65.1",
+          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.65.1.tgz",
+          "integrity": "sha512-CUYFRPztFkR7qrXq/0UAhLjeHd8FdjLe4jBjj8tfKc7OXwxDeZczqNFyRMATZpPaduTH7BU2r3OUjQrgAbquWg==",
+          "requires": {}
+        },
        "tslib": {
          "version": "2.3.1",
          "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -24812,34 +24824,40 @@
      "requires": {}
    },
    "@patternfly/react-styles": {
-      "version": "4.48.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.48.19.tgz",
-      "integrity": "sha512-8+t8wqYGWkmyhxLty/kQXCY44rnW0y60nUMG7QKNzF1bAFJIpR8jKuVnHArM1h+MI9D53e8OVjKORH83hUAzJw=="
+      "version": "4.64.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.64.1.tgz",
+      "integrity": "sha512-+GxULkP2o5Vpr9w+J4NiGOGzhTfNniYzdPGEF/yC+oDoAXB6Q1HJyQnEj+kJH31xNvwmw3G3VFtwRLX4ZWr0oA=="
    },
    "@patternfly/react-table": {
-      "version": "4.67.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.67.19.tgz",
-      "integrity": "sha512-pAa0tpafLHtICCiM3TDQ89xqQTvkZtRuwJ6+KKSpN1UdEEHy+3j0JjDUcslN+6Lo7stgoLwgWzGmE7bsx4Ys5Q==",
+      "version": "4.83.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.83.1.tgz",
+      "integrity": "sha512-mkq13x9funh+Nh2Uzj2ZQBOacNYc+a60yUAHZMXgNcljCJ3LTQUoYy6EonvYrqwSrpC7vj8nLt8+/XbDNc0Aig==",
      "requires": {
-        "@patternfly/react-core": "^4.198.19",
-        "@patternfly/react-icons": "^4.49.19",
-        "@patternfly/react-styles": "^4.48.19",
-        "@patternfly/react-tokens": "^4.50.19",
+        "@patternfly/react-core": "^4.214.1",
+        "@patternfly/react-icons": "^4.65.1",
+        "@patternfly/react-styles": "^4.64.1",
+        "@patternfly/react-tokens": "^4.66.1",
        "lodash": "^4.17.19",
        "tslib": "^2.0.0"
      },
      "dependencies": {
+        "@patternfly/react-icons": {
+          "version": "4.65.1",
+          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.65.1.tgz",
+          "integrity": "sha512-CUYFRPztFkR7qrXq/0UAhLjeHd8FdjLe4jBjj8tfKc7OXwxDeZczqNFyRMATZpPaduTH7BU2r3OUjQrgAbquWg==",
+          "requires": {}
+        },
        "tslib": {
-          "version": "2.3.1",
-          "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
-          "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
+          "version": "2.4.0",
+          "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
+          "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
        }
      }
    },
    "@patternfly/react-tokens": {
-      "version": "4.50.19",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.50.19.tgz",
-      "integrity": "sha512-wbUPb8welJ8p+OjXrc0X3UYDj5JjN9xnfpYkZdAySpcFtk0BAn5Py6UEZCjKtw7XHHfCQ1zwKXpXDShcu/5KVQ=="
+      "version": "4.66.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.66.1.tgz",
+      "integrity": "sha512-k0IWqpufM6ezT+3gWlEamqQ7LW9yi8e8cBBlude5IU8eIEqIG6AccwR1WNBEK1wCVWGwVxakLMdf0XBLl4k52Q=="
    },
    "@pmmmwh/react-refresh-webpack-plugin": {
      "version": "0.5.4",
@@ -25318,14 +25336,14 @@
      }
    },
    "@testing-library/react": {
-      "version": "12.1.4",
-      "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.4.tgz",
-      "integrity": "sha512-jiPKOm7vyUw311Hn/HlNQ9P8/lHNtArAx0PisXyFixDDvfl8DbD6EUdbshK5eqauvBSvzZd19itqQ9j3nferJA==",
+      "version": "12.1.5",
+      "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.5.tgz",
+      "integrity": "sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==",
      "dev": true,
      "requires": {
        "@babel/runtime": "^7.12.5",
        "@testing-library/dom": "^8.0.0",
-        "@types/react-dom": "*"
+        "@types/react-dom": "<18.0.0"
      }
    },
    "@tootallnate/once": {
@@ -26018,9 +26036,9 @@
      }
    },
    "ace-builds": {
-      "version": "1.4.12",
-      "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.4.12.tgz",
-      "integrity": "sha512-G+chJctFPiiLGvs3+/Mly3apXTcfgE45dT5yp12BcWZ1kUs+gm0qd3/fv4gsz6fVag4mM0moHVpjHDIgph6Psg=="
+      "version": "1.5.1",
+      "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.5.1.tgz",
+      "integrity": "sha512-2G313uyM7lfqZgCs6xCW4QPeuX2GZKaCyRqKhTC2mBeZqC7TjkTXguKRyLzsAIMLJfj3koq98RXCBoemoZVAnQ=="
    },
    "acorn": {
      "version": "7.4.1",
@@ -27160,9 +27178,9 @@
      }
    },
    "codemirror": {
-      "version": "5.61.0",
-      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.61.0.tgz",
-      "integrity": "sha512-D3wYH90tYY1BsKlUe0oNj2JAhQ9TepkD51auk3N7q+4uz7A/cgJ5JsWHreT0PqieW1QhOuqxQ2reCXV1YXzecg=="
+      "version": "5.65.4",
+      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.4.tgz",
+      "integrity": "sha512-tytrSm5Rh52b6j36cbDXN+FHwHCl9aroY4BrDZB2NFFL3Wjfq9nuYVLFFhaOYOczKAg3JXTr8BuT8LcE5QY4Iw=="
    },
    "collect-v8-coverage": {
      "version": "1.0.1",
@@ -27714,9 +27732,9 @@
      "integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
    },
    "d3": {
-      "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/d3/-/d3-7.1.1.tgz",
-      "integrity": "sha512-8zkLMwSvUAnfN9pcJDfkuxU0Nvg4RLUD0A4BZN1KxJPtlnCGzMx3xM5cRl4m8fym/Vy8rlq52tl90UF3m91OnA==",
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
+      "integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
      "requires": {
        "d3-array": "3",
        "d3-axis": "3",
@@ -29043,9 +29061,9 @@
      }
    },
    "eslint-plugin-i18next": {
-      "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-i18next/-/eslint-plugin-i18next-5.1.2.tgz",
-      "integrity": "sha512-YuJWaio8BE7eoWE2V3UnddwJhf3XNQ2tb7XAKZhbkeA+BWzm33ujOv6Ezm98Wjc8VCyT9NJvDyvs5/a9AG4QpQ==",
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-i18next/-/eslint-plugin-i18next-5.2.1.tgz",
+      "integrity": "sha512-yXlWOMiyWz9aCGVrLeFijt+LsCXZj9QoddYXmxUeFZrqst4Z2j6vAMBn2iSE2JTNbPDyrdGl3H03UCo+CbdKbQ==",
      "dev": true,
      "requires": {
        "requireindex": "~1.1.0"
@@ -33813,9 +33831,9 @@
      }
    },
    "luxon": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/luxon/-/luxon-2.0.1.tgz",
-      "integrity": "sha512-8Eawf81c9ZlQj62W3eq4mp+C7SAIAnmaS7ZuEAiX503YMcn+0C1JnMQRtfaQj6B5qTZLgHv0F4H5WabBCvi1fw=="
+      "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/luxon/-/luxon-2.4.0.tgz",
+      "integrity": "sha512-w+NAwWOUL5hO0SgwOHsMBAmZ15SoknmQXhSO0hIbJCAmPKSsGeK8MlmhYh2w6Iib38IxN2M+/ooXWLbeis7GuA=="
    },
    "lz-string": {
      "version": "1.4.4",
@@ -34047,13 +34065,10 @@
      "dev": true
    },
    "mock-socket": {
-      "version": "9.0.3",
-      "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.0.3.tgz",
-      "integrity": "sha512-SxIiD2yE/By79p3cNAAXyLQWTvEFNEzcAO7PH+DzRqKSFaplAPFjiQLmw8ofmpCsZf+Rhfn2/xCJagpdGmYdTw==",
-      "dev": true,
-      "requires": {
-        "url-parse": "^1.4.4"
-      }
+      "version": "9.1.3",
+      "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.1.3.tgz",
+      "integrity": "sha512-uz8lx8c5wuJYJ21f5UtovqpV0+KJuVwE7cVOLNhrl2QW/CvmstOLRfjXnLSbfFHZtJtiaSGQu0oCJA8SmRcK6A==",
+      "dev": true
    },
    "moo": {
      "version": "0.5.1",
@@ -35602,12 +35617,6 @@
      "integrity": "sha512-TIRk4aqYLNoJUbd+g2lEdz5kLWIuTMRagAXxl78Q0RiVjAOugHmeKNGdd3cwo/ktpf9aL9epCfFqWDEKysUlLQ==",
      "dev": true
    },
-    "querystringify": {
-      "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
-      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
-      "dev": true
-    },
    "queue-microtask": {
      "version": "1.2.3",
      "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -35949,9 +35958,9 @@
      }
    },
    "react-error-boundary": {
-      "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.3.tgz",
-      "integrity": "sha512-A+F9HHy9fvt9t8SNDlonq01prnU8AmkjvGKV4kk8seB9kU3xMEO8J/PQlLVmoOIDODl5U2kufSBs4vrWIqhsAA==",
+      "version": "3.1.4",
+      "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz",
+      "integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==",
      "requires": {
        "@babel/runtime": "^7.12.5"
      }
@@ -37312,13 +37321,13 @@
      "requires": {}
    },
    "styled-components": {
-      "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.0.tgz",
-      "integrity": "sha512-bPJKwZCHjJPf/hwTJl6TbkSZg/3evha+XPEizrZUGb535jLImwDUdjTNxXqjjaASt2M4qO4AVfoHJNe3XB/tpQ==",
+      "version": "5.3.5",
+      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
+      "integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
      "requires": {
        "@babel/helper-module-imports": "^7.0.0",
        "@babel/traverse": "^7.4.5",
-        "@emotion/is-prop-valid": "^0.8.8",
+        "@emotion/is-prop-valid": "^1.1.0",
        "@emotion/stylis": "^0.8.4",
        "@emotion/unitless": "^0.7.4",
        "babel-plugin-styled-components": ">= 1.12.0",
@@ -38024,16 +38033,6 @@
        "punycode": "^2.1.0"
      }
    },
-    "url-parse": {
-      "version": "1.5.9",
-      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz",
-      "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==",
-      "dev": true,
-      "requires": {
-        "querystringify": "^2.1.1",
-        "requires-port": "^1.0.0"
-      }
-    },
    "util-deprecate": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -6,32 +6,32 @@
     "node": ">=16.13.1"
   },
   "dependencies": {
-    "@lingui/react": "3.9.0",
-    "@patternfly/patternfly": "4.183.1",
-    "@patternfly/react-core": "4.198.19",
+    "@lingui/react": "3.13.3",
+    "@patternfly/patternfly": "4.194.4",
+    "@patternfly/react-core": "^4.201.0",
     "@patternfly/react-icons": "4.49.19",
-    "@patternfly/react-table": "4.67.19",
-    "ace-builds": "^1.4.12",
+    "@patternfly/react-table": "4.83.1",
+    "ace-builds": "^1.5.1",
     "ansi-to-html": "0.7.2",
     "axios": "0.22.0",
-    "codemirror": "^5.47.0",
-    "d3": "7.1.1",
+    "codemirror": "^5.65.4",
+    "d3": "7.4.4",
     "dagre": "^0.8.4",
     "formik": "2.2.9",
     "has-ansi": "5.0.1",
     "html-entities": "2.3.2",
     "js-yaml": "^3.13.1",
-    "luxon": "^2.0.1",
+    "luxon": "^2.4.0",
     "prop-types": "^15.6.2",
     "react": "17.0.2",
     "react-ace": "^9.3.0",
     "react-dom": "17.0.2",
-    "react-error-boundary": "^3.1.3",
+    "react-error-boundary": "^3.1.4",
     "react-router-dom": "^5.1.2",
     "react-virtualized": "^9.21.1",
     "rrule": "2.6.4",
     "sanitize-html": "2.4.0",
-    "styled-components": "5.3.0"
+    "styled-components": "5.3.5"
   },
   "devDependencies": {
     "@babel/core": "^7.16.10",
@@ -46,7 +46,7 @@
     "@lingui/macro": "^3.7.1",
     "@nteract/mockument": "^1.0.4",
     "@testing-library/jest-dom": "^5.16.2",
-    "@testing-library/react": "^12.1.4",
+    "@testing-library/react": "^12.1.5",
     "@wojtekmaj/enzyme-adapter-react-17": "0.6.5",
     "babel-plugin-macros": "3.1.0",
     "enzyme": "^3.10.0",
@@ -56,14 +56,14 @@
     "eslint-config-airbnb": "19.0.4",
     "eslint-config-prettier": "8.3.0",
     "eslint-import-resolver-webpack": "0.13.2",
-    "eslint-plugin-i18next": "5.1.2",
+    "eslint-plugin-i18next": "5.2.1",
     "eslint-plugin-import": "2.25.4",
     "eslint-plugin-jsx-a11y": "6.5.1",
     "eslint-plugin-react": "7.28.0",
     "eslint-plugin-react-hooks": "4.3.0",
     "http-proxy-middleware": "^1.0.3",
     "jest-websocket-mock": "^2.0.2",
-    "mock-socket": "^9.0.3",
+    "mock-socket": "^9.1.3",
     "prettier": "2.3.2",
     "react-scripts": "5.0.0"
   },
@@ -368,7 +368,7 @@ describe('<AdHocCommandsWizard/>', () => {
       response: {
         config: {
           method: 'get',
-          url: '/api/v2/credentals',
+          url: '/api/v2/credentials',
         },
         data: 'An error occurred',
         status: 403,
@@ -3,6 +3,7 @@ import PropTypes from 'prop-types';
 import { useHistory } from 'react-router-dom';
 import { t } from '@lingui/macro';
 import { TeamsAPI, UsersAPI } from 'api';
+import useSelected from 'hooks/useSelected';
 import SelectableCard from '../SelectableCard';
 import Wizard from '../Wizard';
 import SelectResourceStep from './SelectResourceStep';
@@ -71,51 +72,31 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
   const history = useHistory();
 
-  const [selectedResource, setSelectedResource] = useState(null);
-  const [selectedResourceRows, setSelectedResourceRows] = useState([]);
-  const [selectedRoleRows, setSelectedRoleRows] = useState([]);
+  const {
+    selected: resourcesSelected,
+    handleSelect: handleResourceSelect,
+    clearSelected: clearResources,
+  } = useSelected([]);
+  const {
+    selected: rolesSelected,
+    handleSelect: handleRoleSelect,
+    clearSelected: clearRoles,
+  } = useSelected([]);
+
+  const [resourceType, setResourceType] = useState(null);
   const [currentStepId, setCurrentStepId] = useState(1);
   const [maxEnabledStep, setMaxEnabledStep] = useState(1);
 
-  const handleResourceCheckboxClick = (user) => {
-    const selectedIndex = selectedResourceRows.findIndex(
-      (selectedRow) => selectedRow.id === user.id
-    );
-    if (selectedIndex > -1) {
-      selectedResourceRows.splice(selectedIndex, 1);
-      if (selectedResourceRows.length === 0) {
-        setMaxEnabledStep(currentStepId);
-      }
-      setSelectedRoleRows(selectedResourceRows);
-    } else {
-      setSelectedResourceRows([...selectedResourceRows, user]);
-    }
-  };
-
   useEffect(() => {
     if (currentStepId === 1 && maxEnabledStep > 1) {
       history.push(history.location.pathname);
     }
   }, [currentStepId, history, maxEnabledStep]);
 
-  const handleRoleCheckboxClick = (role) => {
-    const selectedIndex = selectedRoleRows.findIndex(
-      (selectedRow) => selectedRow.id === role.id
-    );
-
-    if (selectedIndex > -1) {
-      setSelectedRoleRows(
-        selectedRoleRows.filter((r, index) => index !== selectedIndex)
-      );
-    } else {
-      setSelectedRoleRows([...selectedRoleRows, role]);
-    }
-  };
-
-  const handleResourceSelect = (resourceType) => {
-    setSelectedResource(resourceType);
-    setSelectedResourceRows([]);
-    setSelectedRoleRows([]);
+  const handleResourceTypeSelect = (type) => {
+    setResourceType(type);
+    clearResources();
+    clearRoles();
   };
 
   const handleWizardNext = (step) => {
@@ -131,20 +112,20 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
     try {
       const roleRequests = [];
 
-      for (let i = 0; i < selectedResourceRows.length; i++) {
-        for (let j = 0; j < selectedRoleRows.length; j++) {
-          if (selectedResource === 'users') {
+      for (let i = 0; i < resourcesSelected.length; i++) {
+        for (let j = 0; j < rolesSelected.length; j++) {
+          if (resourceType === 'users') {
             roleRequests.push(
               UsersAPI.associateRole(
-                selectedResourceRows[i].id,
-                selectedRoleRows[j].id
+                resourcesSelected[i].id,
+                rolesSelected[j].id
               )
             );
-          } else if (selectedResource === 'teams') {
+          } else if (resourceType === 'teams') {
             roleRequests.push(
               TeamsAPI.associateRole(
-                selectedResourceRows[i].id,
-                selectedRoleRows[j].id
+                resourcesSelected[i].id,
+                rolesSelected[j].id
              )
            );
          }
@@ -162,7 +143,7 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
   // Object roles can be user only, so we remove them when
   // showing role choices for team access
   const selectableRoles = { ...roles };
-  if (selectedResource === 'teams') {
+  if (resourceType === 'teams') {
     Object.keys(roles).forEach((key) => {
       if (selectableRoles[key].user_only) {
         delete selectableRoles[key];
@@ -172,7 +153,7 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
 
   let wizardTitle = '';
 
-  switch (selectedResource) {
+  switch (resourceType) {
     case 'users':
       wizardTitle = t`Add User Roles`;
       break;
@@ -193,60 +174,60 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
           {t`Choose the type of resource that will be receiving new roles. For example, if you'd like to add new roles to a set of users please choose Users and click Next. You'll be able to select the specific resources in the next step.`}
         </div>
         <SelectableCard
-          isSelected={selectedResource === 'users'}
+          isSelected={resourceType === 'users'}
           label={t`Users`}
           ariaLabel={t`Users`}
           dataCy="add-role-users"
-          onClick={() => handleResourceSelect('users')}
+          onClick={() => handleResourceTypeSelect('users')}
         />
         {resource?.type === 'team' ||
        (resource?.type === 'credential' &&
          !resource?.organization) ? null : (
          <SelectableCard
-            isSelected={selectedResource === 'teams'}
+            isSelected={resourceType === 'teams'}
            label={t`Teams`}
            ariaLabel={t`Teams`}
            dataCy="add-role-teams"
-            onClick={() => handleResourceSelect('teams')}
+            onClick={() => handleResourceTypeSelect('teams')}
          />
        )}
      </div>
    ),
    nextButtonText: t`Next`,
-    enableNext: selectedResource !== null,
+    enableNext: resourceType !== null,
  },
  {
    id: 2,
    name: t`Select Items from List`,
    component: (
      <>
-        {selectedResource === 'users' && (
+        {resourceType === 'users' && (
          <SelectResourceStep
            searchColumns={userSearchColumns}
            sortColumns={userSortColumns}
            displayKey="username"
-            onRowClick={handleResourceCheckboxClick}
+            onRowClick={handleResourceSelect}
            fetchItems={readUsers}
            fetchOptions={readUsersOptions}
            selectedLabel={t`Selected`}
-            selectedResourceRows={selectedResourceRows}
+            selectedResourceRows={resourcesSelected}
            sortedColumnKey="username"
          />
        )}
-        {selectedResource === 'teams' && (
+        {resourceType === 'teams' && (
          <SelectResourceStep
            searchColumns={teamSearchColumns}
            sortColumns={teamSortColumns}
-            onRowClick={handleResourceCheckboxClick}
+            onRowClick={handleResourceSelect}
            fetchItems={readTeams}
            fetchOptions={readTeamsOptions}
            selectedLabel={t`Selected`}
-            selectedResourceRows={selectedResourceRows}
+            selectedResourceRows={resourcesSelected}
          />
        )}
      </>
    ),
-    enableNext: selectedResourceRows.length > 0,
+    enableNext: resourcesSelected.length > 0,
    nextButtonText: t`Next`,
    canJumpTo: maxEnabledStep >= 2,
  },
@@ -255,16 +236,16 @@ function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
      name: t`Select Roles to Apply`,
      component: (
        <SelectRoleStep
-          onRolesClick={handleRoleCheckboxClick}
+          onRolesClick={handleRoleSelect}
          roles={selectableRoles}
-          selectedListKey={selectedResource === 'users' ? 'username' : 'name'}
+          selectedListKey={resourceType === 'users' ? 'username' : 'name'}
          selectedListLabel={t`Selected`}
-          selectedResourceRows={selectedResourceRows}
-          selectedRoleRows={selectedRoleRows}
+          selectedResourceRows={resourcesSelected}
+          selectedRoleRows={rolesSelected}
        />
      ),
      nextButtonText: t`Save`,
-      enableNext: selectedRoleRows.length > 0,
+      enableNext: rolesSelected.length > 0,
      canJumpTo: maxEnabledStep >= 3,
    },
  ];
@@ -42,6 +42,7 @@ describe('<_AddResourceRole />', () => {
       results: [
         { id: 1, username: 'foo', url: '' },
         { id: 2, username: 'bar', url: '' },
+        { id: 3, username: 'baz', url: '' },
       ],
     },
   });
@@ -95,14 +96,20 @@ describe('<_AddResourceRole />', () => {
     // Step 2
     await waitForElement(wrapper, 'EmptyStateBody', (el) => el.length === 0);
     expect(wrapper.find('Chip').length).toBe(0);
-    act(() =>
-      wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true)
-    );
-    wrapper.update();
+    wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true);
+    wrapper.find('CheckboxListItem[name="bar"]').invoke('onSelect')(true);
+    wrapper.find('CheckboxListItem[name="baz"]').invoke('onSelect')(true);
+    wrapper.find('CheckboxListItem[name="baz"]').invoke('onSelect')(false);
     expect(
       wrapper.find('CheckboxListItem[name="foo"]').prop('isSelected')
     ).toBe(true);
-    expect(wrapper.find('Chip').length).toBe(1);
+    expect(
+      wrapper.find('CheckboxListItem[name="bar"]').prop('isSelected')
+    ).toBe(true);
+    expect(
+      wrapper.find('CheckboxListItem[name="baz"]').prop('isSelected')
+    ).toBe(false);
+    expect(wrapper.find('Chip').length).toBe(2);
     act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
     wrapper.update();
@@ -120,6 +127,8 @@ describe('<_AddResourceRole />', () => {
       wrapper.find('Button[type="submit"]').prop('onClick')()
     );
     expect(UsersAPI.associateRole).toBeCalledWith(1, 1);
+    expect(UsersAPI.associateRole).toBeCalledWith(2, 1);
+    expect(UsersAPI.associateRole).toBeCalledTimes(2);
   });
 
   test('should call on error properly', async () => {
@@ -189,7 +198,7 @@ describe('<_AddResourceRole />', () => {
     expect(onError).toBeCalled();
   });
 
-  test('should should update history properly', async () => {
+  test('should update history properly', async () => {
     let wrapper;
     const history = createMemoryHistory({
       initialEntries: ['organizations/2/access?resource.order_by=-username'],
@@ -154,7 +154,7 @@ VariablesDetail.propTypes = {
   label: node.isRequired,
   rows: oneOfType([number, string]),
   dataCy: string,
-  helpText: string,
+  helpText: oneOfType([node, string]),
   name: string.isRequired,
 };
 VariablesDetail.defaultProps = {
@@ -10,6 +10,7 @@ import {
   ToolbarGroup,
   ToolbarItem,
   ToolbarToggleGroup,
+  Tooltip,
   Dropdown,
   DropdownPosition,
   KebabToggle,
@@ -117,13 +118,15 @@ function DataListToolbar({
       {onSelectAll && (
         <ToolbarGroup>
           <ToolbarItem>
-            <Checkbox
-              isChecked={isAllSelected}
-              onChange={onSelectAll}
-              aria-label={t`Select all`}
-              id="select-all"
-              ouiaId="select-all"
-            />
+            <Tooltip content={t`Select all`} position="top">
+              <Checkbox
+                isChecked={isAllSelected}
+                onChange={onSelectAll}
+                aria-label={t`Select all`}
+                id="select-all"
+                ouiaId="select-all"
+              />
+            </Tooltip>
           </ToolbarItem>
         </ToolbarGroup>
       )}
@@ -3,6 +3,7 @@ import React from 'react';
 import styled from 'styled-components';
 import { TextListItemVariants } from '@patternfly/react-core';
 import { DetailName, DetailValue } from './Detail';
+import Popover from '../Popover';
 
 const Value = styled(DetailValue)`
   margin-top: var(--pf-global--spacer--xs);
@@ -12,7 +13,7 @@ const Value = styled(DetailValue)`
   overflow: auto;
 `;
 
-function ArrayDetail({ label, value, dataCy }) {
+function ArrayDetail({ label, helpText, value, dataCy }) {
   const labelCy = dataCy ? `${dataCy}-label` : null;
   const valueCy = dataCy ? `${dataCy}-value` : null;
@@ -22,6 +23,7 @@ function ArrayDetail({ label, helpText, value, dataCy }) {
     <div css="grid-column: span 2">
       <DetailName component={TextListItemVariants.dt} data-cy={labelCy}>
         {label}
+        {helpText && <Popover header={label} content={helpText} id={dataCy} />}
       </DetailName>
       <Value component={TextListItemVariants.dd} data-cy={valueCy}>
         {vals.map((v) => (
@@ -11,8 +11,15 @@ const Detail = styled(_Detail)`
   }
 `;
 
-function DeletedDetail({ label }) {
-  return <Detail label={label} value={t`Deleted`} />;
+function DeletedDetail({ label, dataCy, helpText }) {
+  return (
+    <Detail
+      label={label}
+      dataCy={dataCy}
+      value={t`Deleted`}
+      helpText={helpText}
+    />
+  );
 }
 
 DeletedDetail.propTypes = {
@@ -21,7 +21,7 @@ const DetailValue = styled(
     <TextListItem {...props} />
   )
 )`
-  word-break: break-all;
+  overflow-wrap: break-word;
   ${(props) =>
     props.fullWidth &&
     `
@@ -9,11 +9,12 @@ const Detail = styled(_Detail)`
   word-break: break-word;
 `;
 
-function DetailBadge({ label, content, dataCy = null }) {
+function DetailBadge({ label, helpText, content, dataCy = null }) {
   return (
     <Detail
       label={label}
       dataCy={dataCy}
+      helpText={helpText}
       value={<Badge isRead>{content}</Badge>}
     />
   );
@@ -60,6 +60,7 @@ function ExecutionEnvironmentDetail({
   return (
     <Detail
       label={label}
+      helpText={helpText}
       value={
         <>
           {t`Missing resource`}
@@ -102,6 +103,7 @@ function ExecutionEnvironmentDetail({
   return (
     <Detail
       label={t`Execution Environment`}
+      helpText={helpText}
       value={
         <>
           {t`Missing resource`}
@@ -103,13 +103,17 @@ function JobList({
   }, [fetchJobs]);
 
   const fetchJobsById = useCallback(
-    async (ids, qs = {}) => {
-      const params = parseQueryString(qs, location.search);
+    async (ids) => {
+      const params = parseQueryString(qsConfig, location.search);
       params.id__in = ids.join(',');
-      const { data } = await UnifiedJobsAPI.read(params);
-      return data.results;
+      try {
+        const { data } = await UnifiedJobsAPI.read(params);
+        return data.results;
+      } catch (e) {
+        return [];
+      }
     },
-    [location.search]
+    [location.search] // eslint-disable-line react-hooks/exhaustive-deps
   );
 
   const jobs = useWsJobs(results, fetchJobsById, qsConfig);
@@ -47,16 +47,9 @@ export default function useWsJobs(initialJobs, fetchJobsById, qsConfig) {
       return;
     }
     const params = parseQueryString(qsConfig, location.search);
-    const filtersApplied = Object.keys(params).length > 4;
-    if (
-      filtersApplied &&
-      !['completed', 'failed', 'error'].includes(lastMessage.status)
-    ) {
-      return;
-    }
 
     const jobId = lastMessage.unified_job_id;
     const index = jobs.findIndex((j) => j.id === jobId);
 
     if (index > -1) {
       setJobs(sortJobs(updateJob(jobs, index, lastMessage), params));
     } else {
@@ -7,14 +7,17 @@ import { parseQueryString, updateQueryString } from 'util/qs';
 
 const Th = styled(PFTh)`
-  --pf-c-table--cell--Overflow: initial;
+  --pf-c-table--cell--MaxWidth: none;
 `;
 
 export default function HeaderRow({
@@ -166,7 +166,7 @@ function ScheduleList({
       headerRow={
         <HeaderRow qsConfig={QS_CONFIG}>
           <HeaderCell sortKey="name">{t`Name`}</HeaderCell>
-          <HeaderCell>{t`Type`}</HeaderCell>
+          <HeaderCell sortKey="unified_job_template__polymorphic_ctype__model">{t`Type`}</HeaderCell>
           <HeaderCell sortKey="next_run">{t`Next Run`}</HeaderCell>
           <HeaderCell>{t`Actions`}</HeaderCell>
         </HeaderRow>
@@ -27,7 +27,7 @@ function DateTimePicker({ dateFieldName, timeFieldName, label }) {
   const onDateChange = (inputDate, newDate) => {
     dateHelpers.setTouched();
     if (isValidDate(newDate) && inputDate === yyyyMMddFormat(newDate)) {
-      dateHelpers.setValue(new Date(newDate).toISOString().split('T')[0]);
+      dateHelpers.setValue(inputDate);
     }
   };
 
@@ -17,6 +17,9 @@ import FormField from '../../FormField';
 import DateTimePicker from './DateTimePicker';
 
 const RunOnRadio = styled(Radio)`
+  display: flex;
+  align-items: center;
+
   label {
     display: block;
     width: 100%;
@@ -11,6 +11,8 @@ import {
|
||||
FormGroup,
|
||||
Title,
|
||||
ActionGroup,
|
||||
// To be removed once UI completes complex schedules
|
||||
Alert,
|
||||
} from '@patternfly/react-core';
|
||||
import { Config } from 'contexts/Config';
|
||||
import { SchedulesAPI } from 'api';
|
||||
@@ -376,6 +378,8 @@ function ScheduleForm({
|
||||
launchConfig.ask_limit_on_launch ||
|
||||
launchConfig.ask_credential_on_launch ||
|
||||
launchConfig.ask_scm_branch_on_launch ||
|
||||
launchConfig.ask_tags_on_launch ||
|
||||
launchConfig.ask_skip_tags_on_launch ||
|
||||
launchConfig.survey_enabled ||
|
||||
launchConfig.inventory_needed_to_start ||
|
||||
launchConfig.variables_needed_to_start?.length > 0)
|
||||
@@ -439,6 +443,34 @@ function ScheduleForm({
|
||||
|
||||
if (Object.keys(schedule).length > 0) {
|
||||
if (schedule.rrule) {
|
||||
if (schedule.rrule.split(/\s+/).length > 2) {
|
||||
return (
|
||||
<Form autoComplete="off">
|
||||
<Alert
|
||||
variant="danger"
|
||||
isInline
|
||||
ouiaId="form-submit-error-alert"
|
||||
title={t`Complex schedules are not supported in the UI yet, please use the API to manage this schedule.`}
|
||||
/>
|
||||
<b>{t`Schedule Rules`}:</b>
|
||||
<pre css="white-space: pre; font-family: var(--pf-global--FontFamily--monospace)">
|
||||
{schedule.rrule}
|
||||
</pre>
|
||||
<ActionGroup>
|
||||
<Button
|
||||
ouiaId="schedule-form-cancel-button"
|
||||
aria-label={t`Cancel`}
|
||||
variant="secondary"
|
||||
type="button"
|
||||
onClick={handleCancel}
|
||||
>
|
||||
{t`Cancel`}
|
||||
</Button>
|
||||
</ActionGroup>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const {
|
||||
origOptions: {
|
||||
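The guard added above decides editability with a whitespace split. Assuming AWX serializes a UI-editable schedule as one DTSTART part plus one RRULE part (which the two-token threshold implies), anything longer is routed to the read-only fallback. The example strings below are illustrative, not taken from the diff:

```javascript
// Heuristic in isolation: two whitespace-separated parts = editable;
// extra RRULE/EXRULE/EXDATE chunks make it "complex".
const isComplex = (rrule) => rrule.split(/\s+/).length > 2;

const simple =
  'DTSTART;TZID=America/New_York:20220608T123000 RRULE:FREQ=DAILY;INTERVAL=1';
const complex = `${simple} EXRULE:FREQ=DAILY;BYDAY=SU`;

console.log(isComplex(simple)); // false
console.log(isComplex(complex)); // true
```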
@@ -153,7 +153,9 @@ function WorkflowNodeHelp({ node }) {
   <dt>
     <b>{t`Resource Name`}</b>
   </dt>
-  <dd id="workflow-node-help-name">{unifiedJobTemplate.name}</dd>
+  <dd id="workflow-node-help-name">
+    {unifiedJobTemplate?.name || t`Deleted`}
+  </dd>
   <dt>
     <b>{t`Type`}</b>
   </dt>
@@ -187,7 +189,9 @@ function WorkflowNodeHelp({ node }) {
   <dt>
     <b>{t`Resource Name`}</b>
   </dt>
-  <dd id="workflow-node-help-name">{unifiedJobTemplate.name}</dd>
+  <dd id="workflow-node-help-name">
+    {unifiedJobTemplate?.name || t`Deleted`}
+  </dd>
   <dt>
     <b>{t`Type`}</b>
   </dt>
@@ -548,17 +548,21 @@ function selectSourceForLinking(state, sourceNode) {
     invalidLinkTargetIds.push(link.target.id);
   }
   if (!parentMap[link.target.id]) {
-    parentMap[link.target.id] = [];
+    parentMap[link.target.id] = {
+      parents: [],
+      traversed: false,
+    };
   }
-  parentMap[link.target.id].push(link.source.id);
+  parentMap[link.target.id].parents.push(link.source.id);
 });

 const getAncestors = (id) => {
-  if (parentMap[id]) {
-    parentMap[id].forEach((parentId) => {
+  if (parentMap[id] && !parentMap[id].traversed) {
+    parentMap[id].parents.forEach((parentId) => {
       invalidLinkTargetIds.push(parentId);
       getAncestors(parentId);
     });
+    parentMap[id].traversed = true;
   }
 };
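The traversed flag this hunk introduces memoizes the ancestor walk: workflow graphs are acyclic, but a node reachable along several paths (a diamond) would otherwise be expanded once per path, which goes exponential on deep graphs. A self-contained sketch of the patched shape, with an assumed toy graph:

```javascript
// parentMap entries carry their parents plus a traversed flag; a shared
// ancestor is re-pushed but never re-expanded.
const parentMap = {
  a: { parents: [], traversed: false },
  b: { parents: ['a'], traversed: false },
  c: { parents: ['a'], traversed: false },
  d: { parents: ['b', 'c'], traversed: false }, // diamond: d -> b/c -> a
};

const invalidLinkTargetIds = [];
const getAncestors = (id) => {
  if (parentMap[id] && !parentMap[id].traversed) {
    parentMap[id].parents.forEach((parentId) => {
      invalidLinkTargetIds.push(parentId);
      getAncestors(parentId); // no-op on re-entry once traversed is set
    });
    parentMap[id].traversed = true;
  }
};

getAncestors('d');
console.log(invalidLinkTargetIds); // ['b', 'a', 'c', 'a'] — 'a' expanded once
```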
@@ -9071,7 +9071,7 @@ msgstr "Utilisateur"

 #: components/AppContainer/PageHeaderToolbar.js:165
 msgid "User Details"
-msgstr "Détails de l'erreur"
+msgstr "Détails de l'utilisateur"

 #: screens/Setting/SettingList.js:117
 #: screens/Setting/Settings.js:114
@@ -10164,4 +10164,4 @@ msgstr "{selectedItemsCount, plural, one {Click to run a health check on the sel

 #: components/AppContainer/AppContainer.js:154
 msgid "{sessionCountdown, plural, one {You will be logged out in # second due to inactivity} other {You will be logged out in # seconds due to inactivity}}"
-msgstr "{sessionCountdown, plural, one {You will be logged out in # second due to inactivity} other {You will be logged out in # seconds due to inactivity}}"
+msgstr "{sessionCountdown, plural, one {You will be logged out in # second due to inactivity} other {You will be logged out in # seconds due to inactivity}}"
@@ -50,6 +50,7 @@ describe('<ApplicationAdd/>', () => {
       <ApplicationAdd onSuccessfulAdd={onSuccessfulAdd} />
     );
   });

   expect(wrapper.find('ApplicationAdd').length).toBe(1);
   expect(wrapper.find('ApplicationForm').length).toBe(1);
+  expect(ApplicationsAPI.readOptions).toBeCalled();
@@ -11,6 +11,7 @@ import { Detail, DetailList, UserDateDetail } from 'components/DetailList';
 import { ApplicationsAPI } from 'api';
 import DeleteButton from 'components/DeleteButton';
 import ErrorDetail from 'components/ErrorDetail';
+import applicationHelpTextStrings from '../shared/Application.helptext';

 function ApplicationDetails({
   application,
@@ -81,6 +82,7 @@ function ApplicationDetails({
     application.authorization_grant_type
   )}
   dataCy="app-detail-authorization-grant-type"
+  helpText={applicationHelpTextStrings.authorizationGrantType}
 />
 <Detail
   label={t`Client ID`}
@@ -88,14 +90,16 @@ function ApplicationDetails({
   dataCy="app-detail-client-id"
 />
 <Detail
-  label={t`Redirect uris`}
+  label={t`Redirect URIs`}
   value={application.redirect_uris}
   dataCy="app-detail-redirect-uris"
+  helpText={applicationHelpTextStrings.redirectURIS}
 />
 <Detail
   label={t`Client type`}
   value={getClientType(application.client_type)}
   dataCy="app-detail-client-type"
+  helpText={applicationHelpTextStrings.clientType}
 />
 <UserDateDetail label={t`Created`} date={application.created} />
 <UserDateDetail label={t`Last Modified`} date={application.modified} />
@@ -111,7 +111,7 @@ describe('<ApplicationDetails/>', () => {
   expect(
     wrapper.find('Detail[label="Authorization grant type"]').prop('value')
   ).toBe('Authorization code');
-  expect(wrapper.find('Detail[label="Redirect uris"]').prop('value')).toBe(
+  expect(wrapper.find('Detail[label="Redirect URIs"]').prop('value')).toBe(
     'http://www.google.com'
   );
   expect(wrapper.find('Detail[label="Client type"]').prop('value')).toBe(
@@ -0,0 +1,9 @@
+import { t } from '@lingui/macro';
+
+const applicationHelpTextStrings = {
+  authorizationGrantType: t`The Grant type the user must use to acquire tokens for this application`,
+  clientType: t`Set to Public or Confidential depending on how secure the client device is.`,
+  redirectURIS: t`Allowed URIs list, space separated`,
+};
+
+export default applicationHelpTextStrings;
@@ -13,6 +13,7 @@ import FormActionGroup from 'components/FormActionGroup/FormActionGroup';
 import OrganizationLookup from 'components/Lookup/OrganizationLookup';
 import AnsibleSelect from 'components/AnsibleSelect';
 import Popover from 'components/Popover';
+import applicationHelpTextStrings from './Application.helptext';

 function ApplicationFormFields({
   application,
@@ -83,7 +84,7 @@ function ApplicationFormFields({
   label={t`Authorization grant type`}
   labelIcon={
     <Popover
-      content={t`The Grant type the user must use to acquire tokens for this application`}
+      content={applicationHelpTextStrings.authorizationGrantType}
     />
   }
 >
@@ -113,7 +114,7 @@ function ApplicationFormFields({
     ? required(null)
     : null
 }
-tooltip={t`Allowed URIs list, space separated`}
+tooltip={applicationHelpTextStrings.redirectURIS}
/>
<FormGroup
  fieldId="clientType"
@@ -123,11 +124,7 @@ function ApplicationFormFields({
 }
 isRequired
 label={t`Client type`}
-labelIcon={
-  <Popover
-    content={t`Set to Public or Confidential depending on how secure the client device is.`}
-  />
-}
+labelIcon={<Popover content={applicationHelpTextStrings.clientType} />}
>
 <AnsibleSelect
   {...clientTypeField}
@@ -145,7 +142,6 @@ function ApplicationFormFields({
 function ApplicationForm({
   onCancel,
   onSubmit,
-
   submitError,
   application,
   authorizationOptions,
@@ -100,12 +100,19 @@ function CredentialDetail({ credential }) {

 const { error, dismissError } = useDismissableError(deleteError);

-const renderDetail = ({ id, label, type, ask_at_runtime }) => {
+const renderDetail = ({
+  id,
+  label,
+  type,
+  ask_at_runtime,
+  help_text = '',
+}) => {
   if (inputSources[id]) {
     return (
       <React.Fragment key={id}>
         <Detail
           dataCy={`credential-${id}-detail`}
+          helpText={help_text}
           id={`credential-${id}-detail`}
           fullWidth
           label={<span>{label} *</span>}
@@ -151,6 +158,7 @@ function CredentialDetail({ credential }) {
   key={id}
   label={label}
   value={t`Encrypted`}
+  helpText={help_text}
   isEncrypted
 />
 );
@@ -160,6 +168,7 @@ function CredentialDetail({ credential }) {
 return (
   <Detail
     dataCy={`credential-${id}-detail`}
+    helpText={help_text}
     id={`credential-${id}-detail`}
     key={id}
     label={label}
@@ -175,6 +184,7 @@ function CredentialDetail({ credential }) {
   key={id}
   label={label}
   value={inputs[id]}
+  helpText={help_text}
 />
 );
 };
@@ -49,9 +49,8 @@ function CredentialList() {
   CredentialsAPI.readOptions(),
 ]);
 const searchKeys = getSearchableKeys(credActions.data.actions?.GET);
-const item = searchKeys.indexOf('type');
-if (item) {
-  searchKeys[item] = 'credential_type__kind';
+if (credActions.data.actions?.GET.type) {
+  searchKeys.push({ key: 'credential_type__kind', type: 'string' });
 }
 return {
   credentials: creds.data.results,
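The replacement above fixes two quiet bugs at once: Array.prototype.indexOf returns -1 (truthy) when the key is absent and 0 (falsy) when it comes first, so `if (item)` tested the wrong thing in both edge cases, and the old code overwrote a real search key instead of adding one. The trap in isolation, with `actions` as a stand-in for `credActions.data.actions?.GET`:

```javascript
// indexOf truthiness, the trap the old code fell into:
const searchKeys = ['type', 'name'];
console.log(searchKeys.indexOf('type')); // 0  — falsy, though 'type' exists
console.log(searchKeys.indexOf('nope')); // -1 — truthy, though it doesn't

// The fix consults the OPTIONS payload and appends instead of clobbering:
const actions = { type: { filterable: true } };
const fixedKeys = [];
if (actions.type) {
  fixedKeys.push({ key: 'credential_type__kind', type: 'string' });
}
console.log(fixedKeys); // [ { key: 'credential_type__kind', type: 'string' } ]
```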
@@ -1,5 +1,6 @@
 /* eslint-disable react/jsx-no-useless-fragment */
 import React, { useState } from 'react';
+import { useLocation } from 'react-router-dom';
 import { useField, useFormikContext } from 'formik';
 import { shape, string } from 'prop-types';
 import styled from 'styled-components';
@@ -31,6 +32,7 @@ function CredentialInput({
   fieldOptions,
   isFieldGroupValid,
   credentialKind,
+  isVaultIdDisabled,
   ...rest
 }) {
   const [fileName, setFileName] = useState('');
@@ -148,6 +150,7 @@ function CredentialInput({
   onChange={(value, event) => {
     subFormField.onChange(event);
   }}
+  isDisabled={isVaultIdDisabled}
   validated={isValid ? 'default' : 'error'}
 />
 );
@@ -167,6 +170,7 @@ CredentialInput.defaultProps = {

 function CredentialField({ credentialType, fieldOptions }) {
   const { values: formikValues } = useFormikContext();
+  const location = useLocation();
   const requiredFields = credentialType?.inputs?.required || [];
   const isRequired = requiredFields.includes(fieldOptions.id);
   const validateField = () => {
@@ -242,6 +246,15 @@ function CredentialField({ credentialType, fieldOptions }) {
     <BecomeMethodField fieldOptions={fieldOptions} isRequired={isRequired} />
   );
 }

+let disabled = false;
+if (
+  credentialType.kind === 'vault' &&
+  location.pathname.endsWith('edit') &&
+  fieldOptions.id === 'vault_id'
+) {
+  disabled = true;
+}
 return (
   <CredentialPluginField
     fieldOptions={fieldOptions}
@@ -251,6 +264,7 @@ function CredentialField({ credentialType, fieldOptions }) {
   <CredentialInput
     isFieldGroupValid={isValid}
     fieldOptions={fieldOptions}
+    isVaultIdDisabled={disabled}
   />
 </CredentialPluginField>
 );
@@ -13,6 +13,12 @@ const fieldOptions = {
   secret: true,
 };

+jest.mock('react-router-dom', () => ({
+  ...jest.requireActual('react-router-dom'),
+  useLocation: () => ({
+    pathname: '/credentials/3/edit',
+  }),
+}));
 describe('<CredentialField />', () => {
   let wrapper;
   test('renders correctly without initial value', () => {
@@ -113,4 +119,33 @@ describe('<CredentialField />', () => {
     expect(wrapper.find('TextInput').props().value).toBe('');
     expect(wrapper.find('TextInput').props().placeholder).toBe('ENCRYPTED');
   });
+  test('Should check to see if the ability to edit vault ID is disabled after creation.', () => {
+    const vaultCredential = credentialTypes.find((type) => type.id === 3);
+    const vaultFieldOptions = {
+      id: 'vault_id',
+      label: 'Vault Identifier',
+      type: 'string',
+      secret: true,
+    };
+    wrapper = mountWithContexts(
+      <Formik
+        initialValues={{
+          passwordPrompts: {},
+          inputs: {
+            password: 'password',
+            vault_id: 'vault_id',
+          },
+        }}
+      >
+        {() => (
+          <CredentialField
+            fieldOptions={vaultFieldOptions}
+            credentialType={vaultCredential}
+          />
+        )}
+      </Formik>
+    );
+    expect(wrapper.find('CredentialInput').props().isDisabled).toBe(true);
+    expect(wrapper.find('KeyIcon').length).toBe(1);
+  });
 });
@@ -76,6 +76,7 @@ function CredentialTypeDetails({ credentialType }) {
   rows={6}
   name="input"
   dataCy="credential-type-detail-input"
+  helpText={t`Input schema which defines a set of ordered fields for that type.`}
 />
 <VariablesDetail
   label={t`Injector configuration`}
@@ -83,6 +84,7 @@ function CredentialTypeDetails({ credentialType }) {
   rows={6}
   name="injector"
   dataCy="credential-type-detail-injector"
+  helpText={t`Environment variables or extra variables that specify the values a credential type can inject.`}
 />
 <UserDateDetail
   label={t`Created`}
@@ -12,6 +12,7 @@ import useRequest, { useDismissableError } from 'hooks/useRequest';
 import { toTitleCase } from 'util/strings';
 import { ExecutionEnvironmentsAPI } from 'api';
 import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails';
+import helpText from '../shared/ExecutionEnvironment.helptext';

 function ExecutionEnvironmentDetails({ executionEnvironment }) {
   const history = useHistory();
@@ -52,6 +53,7 @@ function ExecutionEnvironmentDetails({ executionEnvironment }) {
   label={t`Image`}
   value={image}
   dataCy="execution-environment-detail-image"
+  helpText={helpText.image}
 />
 <Detail
   label={t`Description`}
@@ -86,13 +88,14 @@ function ExecutionEnvironmentDetails({ executionEnvironment }) {
 />
 {executionEnvironment.summary_fields.credential && (
   <Detail
-    label={t`Credential`}
+    label={t`Registry credential`}
     value={
       <Label variant="outline" color="blue">
         {executionEnvironment.summary_fields.credential.name}
       </Label>
     }
     dataCy="execution-environment-credential"
+    helpText={helpText.registryCredential}
   />
 )}
 <UserDateDetail
@@ -79,7 +79,8 @@ describe('<ExecutionEnvironmentDetails/>', () => {
     'Globally Available'
   );
   expect(
-    wrapper.find('Detail[label="Credential"]').prop('value').props.children
+    wrapper.find('Detail[label="Registry credential"]').prop('value').props
+      .children
   ).toEqual(executionEnvironment.summary_fields.credential.name);
   expect(wrapper.find('Detail[label="Managed"]').prop('value')).toEqual(
     'False'
@@ -124,7 +125,8 @@ describe('<ExecutionEnvironmentDetails/>', () => {
   );
   expect(wrapper.find(`Detail[label="Organization"] dd`).text()).toBe('Bar');
   expect(
-    wrapper.find('Detail[label="Credential"]').prop('value').props.children
+    wrapper.find('Detail[label="Registry credential"]').prop('value').props
+      .children
   ).toEqual(executionEnvironment.summary_fields.credential.name);
   const dates = wrapper.find('UserDateDetail');
   expect(dates).toHaveLength(2);
@@ -176,7 +178,8 @@ describe('<ExecutionEnvironmentDetails/>', () => {
     'Globally Available'
   );
   expect(
-    wrapper.find('Detail[label="Credential"]').prop('value').props.children
+    wrapper.find('Detail[label="Registry credential"]').prop('value').props
+      .children
   ).toEqual(executionEnvironment.summary_fields.credential.name);
   expect(wrapper.find('Detail[label="Managed"]').prop('value')).toEqual(
     'True'
@@ -0,0 +1,24 @@
+import React from 'react';
+import { t } from '@lingui/macro';
+
+const executionEnvironmentHelpTextStrings = {
+  image: (
+    <span>
+      {t`The full image location, including the container registry, image name, and version tag.`}
+      <br />
+      <br />
+      {t`Examples:`}
+      <ul css="margin: 10px 0 10px 20px">
+        <li>
+          <code>quay.io/ansible/awx-ee:latest</code>
+        </li>
+        <li>
+          <code>repo/project/image-name:tag</code>
+        </li>
+      </ul>
+    </span>
+  ),
+  registryCredential: t`Credential to authenticate with a protected container registry.`,
+};
+
+export default executionEnvironmentHelpTextStrings;
@@ -14,6 +14,7 @@ import ContentError from 'components/ContentError';
 import ContentLoading from 'components/ContentLoading';
 import { required } from 'util/validators';
 import useRequest from 'hooks/useRequest';
+import helpText from './ExecutionEnvironment.helptext';

 function ExecutionEnvironmentFormFields({
   me,
@@ -99,22 +100,7 @@ function ExecutionEnvironmentFormFields({
   validate={required(null)}
   isRequired
   isDisabled={executionEnvironment?.managed || false}
-  tooltip={
-    <span>
-      {t`The full image location, including the container registry, image name, and version tag.`}
-      <br />
-      <br />
-      {t`Examples:`}
-      <ul css="margin: 10px 0 10px 20px">
-        <li>
-          <code>quay.io/ansible/awx-ee:latest</code>
-        </li>
-        <li>
-          <code>repo/project/image-name:tag</code>
-        </li>
-      </ul>
-    </span>
-  }
+  tooltip={helpText.image}
 />
 <FormGroup
   fieldId="execution-environment-container-options"
@@ -160,7 +146,7 @@ function ExecutionEnvironmentFormFields({
   onBlur={() => credentialHelpers.setTouched()}
   onChange={onCredentialChange}
   value={credentialField.value}
-  tooltip={t`Credential to authenticate with a protected container registry.`}
+  tooltip={helpText.registryCredential}
   isDisabled={executionEnvironment?.managed || false}
 />
 </>
@@ -74,6 +74,7 @@ function HostDetail({ host }) {
 <Detail
   label={t`Inventory`}
   dataCy="host-inventory"
+  helpText={t`The inventory that this host belongs to.`}
   value={
     <Link to={`/inventories/inventory/${inventory.id}/details`}>
       {inventory.name}
@@ -50,6 +50,7 @@ function ContainerGroupDetails({ instanceGroup }) {
 {instanceGroup.summary_fields.credential && (
   <Detail
     label={t`Credential`}
+    helpText={t`Credential to authenticate with Kubernetes or OpenShift`}
     value={
       <Link
         to={`/credentials/${instanceGroup?.summary_fields?.credential?.id}`}
@@ -81,6 +82,7 @@ function ContainerGroupDetails({ instanceGroup }) {
     : instanceGroup.pod_spec_override
 }
 rows={6}
+helpText={t`Custom Kubernetes or OpenShift Pod specification.`}
 name="pod_spec_override"
 dataCy="container-group-detail-pod-spec-override"
 />
@@ -62,10 +62,14 @@ function InstanceGroupDetails({ instanceGroup }) {
 <DetailBadge
   label={t`Policy instance minimum`}
   dataCy="instance-group-policy-instance-minimum"
+  helpText={t`Minimum number of instances that will be automatically
+  assigned to this group when new instances come online.`}
   content={instanceGroup.policy_instance_minimum}
 />
 <DetailBadge
   label={t`Policy instance percentage`}
+  helpText={t`Minimum percentage of all instances that will be automatically
+  assigned to this group when new instances come online.`}
   dataCy="instance-group-policy-instance-percentage"
   content={`${instanceGroup.policy_instance_percentage} %`}
 />
@@ -78,6 +78,7 @@ function InventoryDetail({ inventory }) {
     </Link>
   }
 />
+<Detail label={t`Total hosts`} value={inventory.total_hosts} />
 {instanceGroups && instanceGroups.length > 0 && (
   <Detail
     fullWidth
@@ -85,6 +85,7 @@ describe('<InventoryDetail />', () => {
   expectDetailToMatch(wrapper, 'Name', mockInventory.name);
   expectDetailToMatch(wrapper, 'Description', mockInventory.description);
   expectDetailToMatch(wrapper, 'Type', 'Inventory');
+  expectDetailToMatch(wrapper, 'Total hosts', mockInventory.total_hosts);
   const link = wrapper.find('Detail[label="Organization"]').find('Link');

   const org = wrapper.find('Detail[label="Organization"]');
@@ -14,6 +14,7 @@ import PaginatedTable, {
   ToolbarAddButton,
   ToolbarDeleteButton,
   ToolbarSyncSourceButton,
+  getSearchableKeys,
 } from 'components/PaginatedTable';
 import useSelected from 'hooks/useSelected';
 import DatalistToolbar from 'components/DataListToolbar';
@@ -57,9 +58,7 @@ function InventorySourceList() {
   sourceCount: results[0].data.count,
   sourceChoices: results[1].data.actions.GET.source.choices,
   sourceChoicesOptions: results[1].data.actions,
-  searchableKeys: Object.keys(results[1].data.actions?.GET || {}).filter(
-    (key) => results[1].data.actions?.GET[key].filterable
-  ),
+  searchableKeys: getSearchableKeys(results[1].data.actions?.GET),
   relatedSearchableKeys: (
     results[1]?.data?.related_search_fields || []
   ).map((val) => val.slice(0, -8)),
@@ -28,6 +28,7 @@ function SmartInventoryDetail({ inventory }) {
   id,
   modified,
   name,
+  total_hosts,
   variables,
   summary_fields: {
     created_by,
@@ -116,6 +117,7 @@ function SmartInventoryDetail({ inventory }) {
   label={t`Smart host filter`}
   value={<Label variant="outline">{host_filter}</Label>}
 />
+<Detail label={t`Total hosts`} value={total_hosts} />
 {instanceGroups.length > 0 && (
   <Detail
     fullWidth
@@ -57,6 +57,7 @@ describe('<SmartInventoryDetail />', () => {
   assertDetail('Organization', 'Default');
   assertDetail('Smart host filter', 'name__icontains=local');
   assertDetail('Instance groups', 'mock instance group');
+  assertDetail('Total hosts', '2');
   expect(wrapper.find(`Detail[label="Activity"] Sparkline`)).toHaveLength(
     1
   );
@@ -54,14 +54,16 @@ const SCMSubForm = ({ autoPopulateProject }) => {
   const handleProjectUpdate = useCallback(
     (value) => {
       setFieldValue('source_project', value);
-      setFieldValue('source_path', '');
-      setFieldTouched('source_path', false);
       setFieldTouched('source_project', true, false);
+      if (sourcePathField.value) {
+        setFieldValue('source_path', '');
+        setFieldTouched('source_path', false);
+      }
       if (value) {
         fetchSourcePath(value.id);
       }
     },
-    [fetchSourcePath, setFieldValue, setFieldTouched]
+    [fetchSourcePath, setFieldValue, setFieldTouched, sourcePathField.value]
   );

   const handleCredentialUpdate = useCallback(
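The dependency-array change above is the point of this hunk: the handler now reads sourcePathField.value, so the memoized closure must be recreated when that value changes or it acts on a stale copy. The hazard shown with plain closures rather than React (useCallback behaves the same way; the path strings are made up for the example):

```javascript
// A memoized callback sees the values it closed over at creation time.
function makeHandler(sourcePath) {
  return () => console.log('clearing stale source_path:', sourcePath);
}

let sourcePath = 'inventories/dev.ini';
const stale = makeHandler(sourcePath); // like deps [] — never recreated
sourcePath = 'inventories/prod.ini';
const fresh = makeHandler(sourcePath); // like deps [sourcePath] — recreated

stale(); // clearing stale source_path: inventories/dev.ini
fresh(); // clearing stale source_path: inventories/prod.ini
```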
@@ -3,76 +3,76 @@
 "type": "inventory",
 "url": "/api/v2/inventories/2/",
 "related": {
   "created_by": "/api/v2/users/1/",
   "modified_by": "/api/v2/users/1/",
   "hosts": "/api/v2/inventories/2/hosts/",
   "groups": "/api/v2/inventories/2/groups/",
   "root_groups": "/api/v2/inventories/2/root_groups/",
   "variable_data": "/api/v2/inventories/2/variable_data/",
   "script": "/api/v2/inventories/2/script/",
   "tree": "/api/v2/inventories/2/tree/",
   "inventory_sources": "/api/v2/inventories/2/inventory_sources/",
   "update_inventory_sources": "/api/v2/inventories/2/update_inventory_sources/",
   "activity_stream": "/api/v2/inventories/2/activity_stream/",
   "job_templates": "/api/v2/inventories/2/job_templates/",
   "ad_hoc_commands": "/api/v2/inventories/2/ad_hoc_commands/",
   "access_list": "/api/v2/inventories/2/access_list/",
   "object_roles": "/api/v2/inventories/2/object_roles/",
   "instance_groups": "/api/v2/inventories/2/instance_groups/",
   "copy": "/api/v2/inventories/2/copy/",
   "organization": "/api/v2/organizations/1/"
 },
 "summary_fields": {
   "organization": {
     "id": 1,
     "name": "Default",
     "description": ""
   },
   "created_by": {
     "id": 1,
     "username": "admin",
     "first_name": "",
     "last_name": ""
   },
   "modified_by": {
     "id": 1,
     "username": "admin",
     "first_name": "",
     "last_name": ""
   },
   "object_roles": {
     "admin_role": {
       "description": "Can manage all aspects of the inventory",
       "name": "Admin",
       "id": 27
     },
     "update_role": {
       "description": "May update the inventory",
       "name": "Update",
       "id": 28
     },
     "adhoc_role": {
       "description": "May run ad hoc commands on the inventory",
       "name": "Ad Hoc",
       "id": 29
     },
     "use_role": {
       "description": "Can use the inventory in a job template",
       "name": "Use",
       "id": 30
     },
     "read_role": {
       "description": "May view settings for the inventory",
       "name": "Read",
       "id": 31
     }
   },
   "user_capabilities": {
     "edit": true,
     "delete": true,
     "copy": true,
     "adhoc": true
   }
 },
 "created": "2019-10-04T15:29:11.542911Z",
 "modified": "2019-10-04T15:29:11.542924Z",
@@ -83,7 +83,7 @@
 "host_filter": "name__icontains=local",
 "variables": "",
 "has_active_failures": false,
-"total_hosts": 1,
+"total_hosts": 2,
 "hosts_with_active_failures": 0,
 "total_groups": 0,
 "groups_with_active_failures": 0,
@@ -91,4 +91,4 @@
 "total_inventory_sources": 0,
 "inventory_sources_with_failures": 0,
 "pending_deletion": false
-}
+}
awx/ui/src/screens/Job/Job.helptext.js (new file, 45 lines)
@@ -0,0 +1,45 @@
+import React from 'react';
+import { t } from '@lingui/macro';
+
+const jobHelpText = {
+  jobType: t`For job templates, select run to execute the playbook. Select check to only check playbook syntax, test environment setup, and report problems without executing the playbook.`,
+  inventory: t`Select the inventory containing the hosts you want this job to manage.`,
+  project: t`Select the project containing the playbook you want this job to execute.`,
+  executionEnvironment: t`The execution environment that will be used when launching this job template. The resolved execution environment can be overridden by explicitly assigning a different one to this job template.`,
+  playbook: t`Select the playbook to be executed by this job.`,
+  credentials: t`Select credentials for accessing the nodes this job will be ran against. You can only select one credential of each type. For machine credentials (SSH), checking "Prompt on launch" without selecting credentials will require you to select a machine credential at run time. If you select credentials and check "Prompt on launch", the selected credential(s) become the defaults that can be updated at run time.`,
+  labels: t`Optional labels that describe this job template, such as 'dev' or 'test'. Labels can be used to group and filter job templates and completed jobs.`,
+  variables: t`Pass extra command line variables to the playbook. This is the -e or --extra-vars command line parameter for ansible-playbook. Provide key/value pairs using either YAML or JSON. Refer to the documentation for example syntax.`,
+  limit: t`Provide a host pattern to further constrain the list of hosts that will be managed or affected by the playbook. Multiple patterns are allowed. Refer to Ansible documentation for more information and examples on patterns.`,
+  verbosity: t`Control the level of output ansible will produce as the playbook executes.`,
+  jobSlicing: t`Divide the work done by this job template into the specified number of job slices, each running the same tasks against a portion of the inventory.`,
+  timeout: t`The amount of time (in seconds) to run before the job is canceled. Defaults to 0 for no job timeout.`,
+  instanceGroups: t`Select the Instance Groups for this Job Template to run on.`,
+  jobTags: t`Tags are useful when you have a large playbook, and you want to run a specific part of a play or task. Use commas to separate multiple tags. Refer to the documentation for details on the usage of tags.`,
+  skipTags: t`Skip tags are useful when you have a large playbook, and you want to skip specific parts of a play or task. Use commas to separate multiple tags. Refer to the documentation for details on the usage of tags.`,
+  sourceControlBranch: t`Select a branch for the workflow. This branch is applied to all job template nodes that prompt for a branch.`,
+  forks: (
+    <span>
+      {t`The number of parallel or simultaneous processes to use while executing the playbook. An empty value, or a value less than 1 will use the Ansible default which is usually 5. The default number of forks can be overwritten with a change to`}{' '}
+      <code>ansible.cfg</code>.{' '}
+      {t`Refer to the Ansible documentation for details about the configuration file.`}
+    </span>
+  ),
+  module: (moduleName) =>
+    moduleName ? (
+      <>
+        {t`These arguments are used with the specified module. You can find information about ${moduleName} by clicking `}{' '}
+        <a
+          href={`https://docs.ansible.com/ansible/latest/modules/${moduleName}_module.html`}
+          target="_blank"
+          rel="noopener noreferrer"
+        >
+          {t`here.`}
+        </a>
+      </>
+    ) : (
+      t`These arguments are used with the specified module.`
+    ),
+};
+
+export default jobHelpText;
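One entry in the new helptext module is a function rather than a string: jobHelpText.module builds a documentation link for whichever Ansible module an ad hoc job ran. A hypothetical string-only analogue of that branching, since the real entry returns JSX:

```javascript
// Illustration only — mirrors the branch in jobHelpText.module above.
const moduleHelp = (moduleName) =>
  moduleName
    ? `These arguments are used with the specified module. See ` +
      `https://docs.ansible.com/ansible/latest/modules/${moduleName}_module.html`
    : 'These arguments are used with the specified module.';

console.log(moduleHelp('ping')); // ...modules/ping_module.html
console.log(moduleHelp(''));     // generic text, no link
```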
@@ -28,6 +28,7 @@ import ExecutionEnvironmentDetail from 'components/ExecutionEnvironmentDetail';
 import { getJobModel, isJobRunning } from 'util/jobs';
 import { formatDateString } from 'util/dates';
 import { Job } from 'types';
+import jobHelpText from '../Job.helptext';

 const StatusDetailValue = styled.div`
   align-items: center;
@@ -75,6 +76,14 @@ function JobDetail({ job, inventorySourceLabels }) {
   workflow_job: t`Workflow Job`,
 };

+const scmTypes = {
+  '': t`Manual`,
+  git: t`Git`,
+  svn: t`Subversion`,
+  insights: t`Red Hat Insights`,
+  archive: t`Remote Archive`,
+};
+
 const deleteJob = async () => {
   try {
     await getJobModel(job.type).destroy(job.id);
@@ -102,6 +111,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-inventory"
   label={t`Inventory`}
+  helpText={jobHelpText.inventory}
   value={
     <Link
       to={
@@ -115,7 +125,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   }
 />
 ) : (
-  <DeletedDetail label={t`Inventory`} />
+  <DeletedDetail label={t`Inventory`} helpText={jobHelpText.inventory} />
 );
 }
 if (job.type === 'workflow_job') {
@@ -123,6 +133,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-inventory"
   label={t`Inventory`}
+  helpText={jobHelpText.inventory}
   value={
     <Link
       to={
@@ -152,6 +163,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-project"
   label={t`Project`}
+  helpText={jobHelpText.project}
   value={<Link to={`/projects/${project.id}`}>{project.name}</Link>}
 />
 <Detail
@@ -186,6 +198,9 @@ function JobDetail({ job, inventorySourceLabels }) {
 value={
   <StatusDetailValue>
     {job.status && <StatusLabel status={job.status} />}
+    {job.job_explanation && job.job_explanation !== job.status
+      ? job.job_explanation
+      : null}
   </StatusDetailValue>
 }
 />
@@ -239,8 +254,14 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-type"
   label={t`Job Type`}
+  helpText={jobHelpText.jobType}
   value={jobTypes[job.type]}
 />
+<Detail
+  dataCy="source-control-type"
+  label={t`Source Control Type`}
+  value={scmTypes[job.scm_type]}
+/>
 <LaunchedByDetail dataCy="job-launched-by" job={job} />
 {renderInventoryDetail()}
 {inventory_source && (
@@ -288,6 +309,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="source-control-branch"
   label={t`Source Control Branch`}
+  helpText={jobHelpText.sourceControlBranch}
   value={scmBranch}
 />
 )}
@@ -299,18 +321,26 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-playbook"
   label={t`Playbook`}
+  helpText={jobHelpText.playbook}
   value={job.playbook}
 />
-<Detail dataCy="job-limit" label={t`Limit`} value={job.limit} />
+<Detail
+  dataCy="job-limit"
+  label={t`Limit`}
+  helpText={jobHelpText.limit}
+  value={job.limit}
+/>
 <Detail
   dataCy="job-verbosity"
   label={t`Verbosity`}
+  helpText={jobHelpText.verbosity}
   value={VERBOSITY[job.verbosity]}
 />
 {job.type !== 'workflow_job' && !isJobRunning(job.status) && (
   <ExecutionEnvironmentDetail
     dataCy="job-execution-environment"
     executionEnvironment={executionEnvironment}
+    helpText={jobHelpText.executionEnvironment}
     verifyMissingVirtualEnv={false}
   />
 )}
@@ -323,6 +353,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-instance-group"
   label={t`Instance Group`}
+  helpText={jobHelpText.instanceGroups}
   value={buildInstanceGroupLink(instanceGroup)}
 />
 )}
@@ -338,6 +369,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-slice"
   label={t`Job Slice`}
+  helpText={jobHelpText.jobSlicing}
   value={`${job.job_slice_number}/${job.job_slice_count}`}
 />
 )}
@@ -348,6 +380,14 @@ function JobDetail({ job, inventorySourceLabels }) {
   value={t`True`}
 />
 )}
+{typeof job.forks === 'number' && (
+  <Detail
+    dataCy="forks"
+    label={t`Forks`}
+    value={`${job.forks}`}
+    helpText={jobHelpText.forks}
+  />
+)}

 {credential && (
   <Detail
@@ -373,6 +413,7 @@ function JobDetail({ job, inventorySourceLabels }) {
 <Detail
   dataCy="job-credentials"
   fullWidth
+  helpText={jobHelpText.credentials}
   label={t`Credentials`}
   value={
     <ChipGroup
@@ -397,6 +438,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   dataCy="job-labels"
   fullWidth
   label={t`Labels`}
+  helpText={jobHelpText.labels}
   value={
     <ChipGroup
       numChips={5}
@@ -417,6 +459,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   dataCy="job-tags"
   fullWidth
   label={t`Job Tags`}
+  helpText={jobHelpText.jobTags}
   value={
     <ChipGroup
       numChips={5}
@@ -441,6 +484,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   dataCy="job-skip-tags"
   fullWidth
   label={t`Skip Tags`}
+  helpText={jobHelpText.skipTags}
   value={
     <ChipGroup
       numChips={5}
@@ -464,6 +508,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   dataCy="job-module-name"
   label={t`Module Name`}
   value={job.module_name}
+  helpText={jobHelpText.module(job.module_name)}
 />
 <Detail
   dataCy="job-module-arguments"
@@ -486,6 +531,7 @@ function JobDetail({ job, inventorySourceLabels }) {
   label={t`Variables`}
   name="extra_vars"
   dataCy="job-detail-extra-variables"
+  helpText={jobHelpText.variables}
 />
 )}
 {job.artifacts && (
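The scmTypes map added in this file keys on the raw API value of scm_type, including the empty string, because manual projects report scm_type as "". A quick standalone check of the lookup behavior (i18n wrappers dropped for brevity):

```javascript
const scmTypes = {
  '': 'Manual', // the API sends "" for manual projects
  git: 'Git',
  svn: 'Subversion',
  insights: 'Red Hat Insights',
  archive: 'Remote Archive',
};

console.log(scmTypes['']); // 'Manual'
console.log(scmTypes.git); // 'Git'
console.log(scmTypes.bzr); // undefined — an unknown type yields no label
```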
@@ -46,7 +46,12 @@ describe('<JobDetail />', () => {

   // StatusIcon adds visibly hidden accessibility text " successful "
   assertDetail('Job ID', '2');
-  assertDetail('Status', 'Successful');
+  expect(wrapper.find(`Detail[label="Status"] dd`).text()).toContain(
+    'Successful'
+  );
+  expect(wrapper.find(`Detail[label="Status"] dd`).text()).toContain(
+    'Job explanation placeholder'
+  );
   assertDetail('Started', '8/8/2019, 7:24:18 PM');
   assertDetail('Finished', '8/8/2019, 7:24:50 PM');
   assertDetail('Job Template', mockJobData.summary_fields.job_template.name);
@@ -73,6 +78,7 @@ describe('<JobDetail />', () => {
   );

   assertDetail('Job Slice', '0/1');
+  assertDetail('Forks', '42');

   const credentialChip = wrapper.find(
     `Detail[label="Credentials"] CredentialChip`
@@ -115,8 +115,8 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {

 const [jobStatus, setJobStatus] = useState(job.status ?? 'waiting');
 const [forceFlatMode, setForceFlatMode] = useState(false);
-const isFlatMode = isJobRunning(jobStatus) || location.search.length > 1;
+const isFlatMode =
+  isJobRunning(jobStatus) || location.search.length > 1 || job.type !== 'job';
 const [isTreeReady, setIsTreeReady] = useState(false);
 const [onReadyEvents, setOnReadyEvents] = useState([]);

@@ -652,7 +652,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
   isDeleteDisabled={isDeleting}
 />
 </OutputHeader>
-<HostStatusBar counts={job.host_status_counts} />
+<HostStatusBar counts={job.host_status_counts || {}} />
 <JobOutputSearch
   qsConfig={QS_CONFIG}
   job={job}
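Two defensive changes sit in this hunk: only playbook jobs (type 'job') produce the nested event structure the tree view needs, so every other unified job type now forces flat mode, and host_status_counts, which can be absent on those job types, is defaulted to an empty object before it reaches HostStatusBar. Reduced to plain functions, with isJobRunning stubbed using statuses I assume count as running:

```javascript
const isJobRunning = (status) =>
  ['new', 'pending', 'waiting', 'running'].includes(status); // assumed set

const shouldUseFlatMode = (jobStatus, search, jobType) =>
  isJobRunning(jobStatus) || search.length > 1 || jobType !== 'job';

console.log(shouldUseFlatMode('successful', '', 'project_update')); // true
console.log(shouldUseFlatMode('successful', '', 'job'));            // false

const hostStatusCounts = undefined; // absent on non-playbook jobs
console.log(hostStatusCounts || {}); // {} — the bar always gets an object
```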
@@ -44,4 +44,14 @@ describe('getEventRequestParams', () => {
   });
   expect(loadRange).toEqual(range(121, 126));
 });
+
+it('should return last event only', () => {
+  const [params, loadRange] = getEventRequestParams(job, 72, [72, 72]);
+
+  expect(params).toEqual({
+    page: 72,
+    page_size: 1,
+  });
+  expect(loadRange).toEqual(range(72, 72));
+});
 });
@@ -3,7 +3,7 @@ export default function getRowRangePageSize(startIndex, stopIndex) {
   let pageSize;

   if (startIndex === stopIndex) {
-    page = startIndex + 1;
+    page = startIndex;
     pageSize = 1;
   } else if (stopIndex >= startIndex + 50) {
     page = Math.floor(startIndex / 50) + 1;
@@ -3,11 +3,20 @@ import getRowRangePageSize from './jobOutputUtils';
 describe('getRowRangePageSize', () => {
   test('handles range of 1', () => {
     expect(getRowRangePageSize(1, 1)).toEqual({
-      page: 2,
+      page: 1,
       pageSize: 1,
-      firstIndex: 1,
+      firstIndex: 0,
     });
   });

+  test('handles range of 1 at a higher number', () => {
+    expect(getRowRangePageSize(72, 72)).toEqual({
+      page: 72,
+      pageSize: 1,
+      firstIndex: 71,
+    });
+  });
+
   test('handles range larger than 50 rows', () => {
     expect(getRowRangePageSize(55, 125)).toEqual({
       page: 2,
@@ -15,6 +24,7 @@ describe('getRowRangePageSize', () => {
     firstIndex: 50,
   });
 });
+
 test('handles small range', () => {
   expect(getRowRangePageSize(47, 53)).toEqual({
     page: 6,
@@ -22,6 +32,7 @@ describe('getRowRangePageSize', () => {
     firstIndex: 45,
   });
 });
+
 test('handles perfect range', () => {
   expect(getRowRangePageSize(5, 9)).toEqual({
     page: 2,
@@ -29,6 +40,7 @@ describe('getRowRangePageSize', () => {
     firstIndex: 5,
   });
 });
+
 test('handles range with 0 startIndex', () => {
   expect(getRowRangePageSize(0, 50)).toEqual({
     page: 1,
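The corrected single-row branch and the tests above agree on the arithmetic: with a page size of 1, the Nth row lives on page N, and firstIndex is the 0-based index where that page starts, so the old startIndex + 1 skipped one row ahead. The single-row case re-derived in isolation (a sketch, not the full AWX implementation — the multi-row branches are unchanged):

```javascript
// Mirrors the fixed tests: getRowRangePageSize(N, N) → page N, firstIndex N - 1.
function singleRowPage(startIndex) {
  const page = startIndex; // previously startIndex + 1 — off by one page
  return { page, pageSize: 1, firstIndex: page - 1 };
}

console.log(singleRowPage(1));  // { page: 1, pageSize: 1, firstIndex: 0 }
console.log(singleRowPage(72)); // { page: 72, pageSize: 1, firstIndex: 71 }
```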
@@ -49,6 +49,7 @@ export default function useJobEvents(callbacks, jobId, isFlatMode) {

 useEffect(() => {
   if (isFlatMode) {
+    callbacks.setJobTreeReady();
     return;
   }
@@ -5,7 +5,6 @@ import useJobEvents, {
   jobEventsReducer,
   ADD_EVENTS,
   TOGGLE_NODE_COLLAPSED,
-  SET_EVENT_NUM_CHILDREN,
 } from './useJobEvents';

 function Child() {
@@ -16,6 +15,7 @@ function HookTest({
   fetchChildrenSummary = () => {},
   setForceFlatMode = () => {},
+  setJobTreeReady = () => {},
   jobId = 1,
   isFlatMode = false,
 }) {
   const hookFuncs = useJobEvents(
@@ -25,6 +25,7 @@ function HookTest({
     setForceFlatMode,
+    setJobTreeReady,
   },
   jobId,
   isFlatMode
 );
 return <Child id="test" {...hookFuncs} />;
@@ -1295,18 +1296,24 @@ describe('useJobEvents', () => {

 describe('getTotalNumChildren', () => {
   let wrapper;
   beforeEach(() => {

+  test('should not make call to get child events, because there are none for this job type', () => {
+    wrapper = shallow(<HookTest />);
+    wrapper.find('#test').prop('addEvents')(eventsList);
+    expect(callbacks.fetchChildrenSummary).not.toBeCalled();
+  });
+
   test('should get basic number of children', () => {
     wrapper = shallow(<HookTest />);
     wrapper.find('#test').prop('addEvents')(eventsList);
     expect(
       wrapper.find('#test').prop('getTotalNumChildren')('abc-002')
     ).toEqual(3);
   });

   test('should get total number of nested children', () => {
     wrapper = shallow(<HookTest />);
     wrapper.find('#test').prop('addEvents')(eventsList);
     expect(
       wrapper.find('#test').prop('getTotalNumChildren')('abc-001')
     ).toEqual(8);
@@ -104,7 +104,7 @@
 "project": 6,
 "playbook": "chatty_tasks.yml",
 "scm_branch": "main",
-"forks": 0,
+"forks": 42,
 "limit": "",
 "verbosity": 0,
 "extra_vars": "{\"num_messages\": 94}",
@@ -168,7 +168,7 @@
 "ANSIBLE_SSH_CONTROL_PATH_DIR": "/tmp/awx_2_a4b1afiw/cp",
 "ANSIBLE_STDOUT_CALLBACK": "awx_display"
 },
-"job_explanation": "",
+"job_explanation": "Job explanation placeholder",
 "execution_node": "awx",
 "controller_node": "",
 "result_traceback": "",
@@ -25,6 +25,7 @@ import useRequest, { useDismissableError } from 'hooks/useRequest';
 import StatusLabel from 'components/StatusLabel';
 import hasCustomMessages from '../shared/hasCustomMessages';
 import { NOTIFICATION_TYPES } from '../constants';
+import helpText from '../shared/Notifications.helptext';

 const NUM_RETRIES = 25;
 const RETRY_TIMEOUT = 5000;
@@ -34,7 +35,6 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 const [testStatus, setTestStatus] = useState(
   template.summary_fields?.recent_notifications[0]?.status ?? undefined
 );
-
 const {
   created,
   modified,
@@ -151,6 +151,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 />
 <ArrayDetail
   label={t`Recipient List`}
+  helpText={helpText.emailRecepients}
   value={configuration.recipients}
   dataCy="nt-detail-recipients"
 />
@@ -166,6 +167,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 />
 <Detail
   label={t`Timeout`}
+  helpText={helpText.emailTimeout}
   value={configuration.timeout}
   dataCy="nt-detail-timeout"
 />
@@ -178,6 +180,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 <>
   <Detail
     label={t`Grafana URL`}
+    helpText={helpText.grafanaUrl}
     value={configuration.grafana_url}
     dataCy="nt-detail-grafana-url"
   />
@@ -193,6 +196,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 />
 <ArrayDetail
   label={t`Tags for the Annotation`}
+  helpText={helpText.grafanaTags}
   value={configuration.annotation_tags}
   dataCy="nt-detail-"
 />
@@ -222,6 +226,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 />
 <ArrayDetail
   label={t`Destination Channels or Users`}
+  helpText={helpText.ircTargets}
   value={configuration.targets}
   dataCy="nt-detail-channels"
 />
@@ -311,11 +316,13 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 {template.notification_type === 'slack' && (
   <>
     <ArrayDetail
+      helpText={helpText.slackChannels}
       label={t`Destination Channels`}
       value={configuration.channels}
       dataCy="nt-detail-slack-channels"
     />
     <Detail
+      helpText={helpText.slackColor}
       label={t`Notification Color`}
       value={configuration.hex_color}
       dataCy="nt-detail-slack-color"
@@ -326,11 +333,13 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 <>
   <Detail
     label={t`Source Phone Number`}
+    helpText={helpText.twilioSourcePhoneNumber}
     value={configuration.from_number}
     dataCy="nt-detail-twilio-source-phone"
   />
   <ArrayDetail
     label={t`Destination SMS Number(s)`}
+    helpText={helpText.twilioDestinationNumbers}
     value={configuration.to_numbers}
     dataCy="nt-detail-twilio-destination-numbers"
   />
@@ -367,6 +376,7 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
 />
 <CodeDetail
   label={t`HTTP Headers`}
+  helpText={helpText.webhookHeaders}
   value={JSON.stringify(configuration.headers)}
   mode="json"
   rows={6}
@@ -0,0 +1,34 @@
+import React from 'react';
+import { t } from '@lingui/macro';
+
+const helpText = {
+  emailRecepients: t`Use one email address per line to create a recipient list for this type of notification.`,
+  emailTimeout: t`The amount of time (in seconds) before the email
+    notification stops trying to reach the host and times out. Ranges
+    from 1 to 120 seconds.`,
+  grafanaUrl: t`The base URL of the Grafana server - the
+    /api/annotations endpoint will be added automatically to the base
+    Grafana URL.`,
+  grafanaTags: t`Use one Annotation Tag per line, without commas.`,
+  ircTargets: t`Use one IRC channel or username per line. The pound
+    symbol (#) for channels, and the at (@) symbol for users, are not
+    required.`,
+  slackChannels: (
+    <>
+      {t`One Slack channel per line. The pound symbol (#)
+      is required for channels. To respond to or start a thread to a specific message add the parent message Id to the channel where the parent message Id is 16 digits. A dot (.) must be manually inserted after the 10th digit. ie:#destination-channel, 1231257890.006423. See Slack`}{' '}
+      <a href="https://api.slack.com/messaging/retrieving#individual_messages">{t`documentation`}</a>{' '}
+      <span>{t`for more information.`}</span>
+    </>
+  ),
+  slackColor: t`Specify a notification color. Acceptable colors are hex
+    color code (example: #3af or #789abc).`,
+  twilioSourcePhoneNumber: t`The number associated with the "Messaging
+    Service" in Twilio with the format +18005550199.`,
+  twilioDestinationNumbers: t`Use one phone number per line to specify where to
+    route SMS messages. Phone numbers should be formatted +11231231234. For more information see Twilio documentation`,
+  webhookHeaders: t`Specify HTTP Headers in JSON format. Refer to
+    the Ansible Tower documentation for example syntax.`,
+};
+
+export default helpText;
@@ -25,6 +25,7 @@ import {
   twilioPhoneNumber,
 } from 'util/validators';
 import { NotificationType } from 'types';
+import helpText from './Notifications.helptext';

 const TypeFields = {
   email: EmailFields,
@@ -37,7 +38,6 @@ const TypeFields = {
   twilio: TwilioFields,
   webhook: WebhookFields,
 };
-
 function TypeInputsSubForm({ type }) {
   const Fields = TypeFields[type];
   return (
@@ -87,8 +87,7 @@ function EmailFields() {
   validate={required(null)}
   isRequired
   rows={3}
-  tooltip={t`Enter one email address per line to create a recipient
-  list for this type of notification.`}
+  tooltip={helpText.emailRecepients}
 />
 <FormField
   id="email-sender"
@@ -117,9 +116,7 @@ function EmailFields() {
   isRequired
   min="1"
   max="120"
-  tooltip={t`The amount of time (in seconds) before the email
-  notification stops trying to reach the host and times out. Ranges
-  from 1 to 120 seconds.`}
+  tooltip={helpText.emailTimeout}
 />
 <FormGroup fieldId="email-options" label={t`E-mail options`}>
   <FormCheckboxLayout>
@@ -149,9 +146,7 @@ function GrafanaFields() {
   type="text"
   validate={required(null)}
   isRequired
-  tooltip={t`The base URL of the Grafana server - the
-  /api/annotations endpoint will be added automatically to the base
-  Grafana URL.`}
+  tooltip={helpText.grafanaUrl}
 />
 <PasswordField
   id="grafana-key"
@@ -178,7 +173,7 @@ function GrafanaFields() {
   name="notification_configuration.annotation_tags"
   type="textarea"
   rows={3}
-  tooltip={t`Enter one Annotation Tag per line, without commas.`}
+  tooltip={helpText.grafanaTags}
 />
 <CheckboxField
   id="grafana-ssl"
@@ -229,9 +224,7 @@ function IRCFields() {
   type="textarea"
   validate={required(null)}
   isRequired
-  tooltip={t`Enter one IRC channel or username per line. The pound
-  symbol (#) for channels, and the at (@) symbol for users, are not
-  required.`}
+  tooltip={helpText.ircTargets}
 />
 <CheckboxField
   id="grafana-ssl"
@@ -362,14 +355,7 @@ function SlackFields() {
   type="textarea"
   validate={required(null)}
   isRequired
-  tooltip={
-    <>
-      {t`Enter one Slack channel per line. The pound symbol (#)
-      is required for channels. To respond to or start a thread to a specific message add the parent message Id to the channel where the parent message Id is 16 digits. A dot (.) must be manually inserted after the 10th digit. ie:#destination-channel, 1231257890.006423. See Slack`}{' '}
-      <a href="https://api.slack.com/messaging/retrieving#individual_messages">{t`documentation`}</a>{' '}
-      <span>{t`for more information.`}</span>
-    </>
-  }
+  tooltip={helpText.slackChannels}
 />
 <PasswordField
   id="slack-token"
@@ -383,8 +369,7 @@ function SlackFields() {
   label={t`Notification color`}
   name="notification_configuration.hex_color"
   type="text"
-  tooltip={t`Specify a notification color. Acceptable colors are hex
-  color code (example: #3af or #789abc).`}
+  tooltip={helpText.slackColor}
 />
 </>
 );
@@ -407,8 +392,7 @@ function TwilioFields() {
   type="text"
   validate={combine([required(null), twilioPhoneNumber()])}
   isRequired
-  tooltip={t`Enter the number associated with the "Messaging
-  Service" in Twilio in the format +18005550199.`}
+  tooltip={helpText.twilioSourcePhoneNumber}
 />
 <ArrayTextField
   id="twilio-destination-numbers"
@@ -417,8 +401,7 @@ function TwilioFields() {
   type="textarea"
   validate={combine([required(null), twilioPhoneNumber()])}
   isRequired
-  tooltip={t`Enter one phone number per line to specify where to
-  route SMS messages. Phone numbers should be formatted +11231231234. For more information see Twilio documentation`}
+  tooltip={helpText.twilioDestinationNumbers}
 />
 <FormField
   id="twilio-account-sid"
@@ -469,8 +452,7 @@ function WebhookFields() {
   name="notification_configuration.headers"
   label={t`HTTP Headers`}
   mode="javascript"
-  tooltip={t`Specify HTTP Headers in JSON format. Refer to
-  the Ansible Tower documentation for example syntax.`}
+  tooltip={helpText.webhookHeaders}
   rows={5}
 />
 </FormFullWidthLayout>
Some files were not shown because too many files have changed in this diff.