Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)

Compare commits: 21.8.0 ... UI-Feature (40 commits)
| SHA1 |
|---|
| 0933a96d60 |
| 721e19e1c8 |
| 87363af615 |
| 332c433b6e |
| e029cf7196 |
| a1d34462b0 |
| e4283841d6 |
| 477a63d1b4 |
| 4a30cc244f |
| 1f939aa25e |
| 86b0a3d4f1 |
| fe1b37afaf |
| 87dd8c118d |
| d6004fd2d3 |
| 3d3e4ad150 |
| 81821fd378 |
| 8e5af2b5f2 |
| 918db89dc8 |
| 6e25a552d3 |
| 83c48bb5fa |
| 1c65339a24 |
| 75e6366c5e |
| af6fec5592 |
| 893dba7076 |
| d571b9bbbc |
| b28cc34ff3 |
| 776d39f057 |
| 61b242d194 |
| 22b81f5dd3 |
| 99e1920d42 |
| 2218fd5c25 |
| 3c656842f0 |
| bd7635e74e |
| 0faa999ceb |
| 577f102e53 |
| 6538d34b48 |
| 487efb77ce |
| aae57378f0 |
| cfce31419d |
| 8e83f9b134 |
.github/workflows/feature_branch_deletion.yml (vendored, new file, 24 lines added)
@@ -0,0 +1,24 @@
+---
+name: Feature branch deletion cleanup
+on:
+  delete:
+    branches:
+      - feature_**
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Delete API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
.github/workflows/upload_schema.yml (vendored, 1 line changed)
@@ -5,6 +5,7 @@ on:
     branches:
       - devel
       - release_**
+      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest
@@ -2221,6 +2221,15 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):
     class Meta:
         fields = ('can_update',)
 
+    def validate(self, attrs):
+        project = self.instance.source_project
+        if project:
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                raise serializers.ValidationError(failed_reason)
+
+        return super(InventorySourceUpdateSerializer, self).validate(attrs)
+
 
 class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):
@@ -4272,17 +4281,10 @@ class JobLaunchSerializer(BaseSerializer):
         # Basic validation - cannot run a playbook without a playbook
         if not template.project:
             errors['project'] = _("A project is required to run a job.")
-        elif template.project.status in ('error', 'failed'):
-            errors['playbook'] = _("Missing a revision to run due to failed project update.")
-
-            latest_update = template.project.project_updates.last()
-            if latest_update is not None and latest_update.failed:
-                failed_validation_tasks = latest_update.project_update_events.filter(
-                    event='runner_on_failed',
-                    play="Perform project signature/checksum verification",
-                )
-                if failed_validation_tasks:
-                    errors['playbook'] = _("Last project update failed due to signature validation failure.")
+        else:
+            failure_reason = template.project.get_reason_if_failed()
+            if failure_reason:
+                errors['playbook'] = failure_reason
 
         # cannot run a playbook without an inventory
         if template.inventory and template.inventory.pending_deletion is True:
@@ -1,5 +1,5 @@
 Launch a Job Template:
 
+{% ifmeth GET %}
 Make a GET request to this resource to determine if the job_template can be
 launched and whether any passwords are required to launch the job_template.
 The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
 * `inventory_needed_to_start`: Flag indicating the presence of an inventory
   associated with the job template. If not then one should be supplied when
   launching the job (boolean, read-only)
 
-Make a POST request to this resource to launch the job_template. If any
+{% endifmeth %}
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
 passwords, inventory, or extra variables (extra_vars) are required, they must
 be passed via POST data, with extra_vars given as a YAML or JSON string and
 escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
 launched, a 405 status code will be returned. If the provided credential or
 inventory are not allowed to be used by the user, then a 403 status code will
 be returned.
+{% endifmeth %}
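The endpoint documentation above describes the GET/POST contract for launching a job template. As a rough, hypothetical illustration of that contract (the host, credentials, and IDs below are placeholders, not part of this change):

```python
# Hypothetical client for the launch endpoint documented above.
# Host, credentials, template and inventory IDs are placeholders.
import requests

AWX_URL = "https://awx.example.com"
AUTH = ("admin", "password")  # assumption: basic auth is enabled
launch_url = f"{AWX_URL}/api/v2/job_templates/42/launch/"

# GET reports whether the template can be launched and what must be supplied.
probe = requests.get(launch_url, auth=AUTH).json()

payload = {}
if probe.get("inventory_needed_to_start"):
    payload["inventory"] = 1  # placeholder inventory ID
if probe.get("ask_variables_on_launch"):
    payload["extra_vars"] = {"greeting": "hello"}

# POST launches the job; 400/403/405 correspond to the error cases listed above.
resp = requests.post(launch_url, json=payload, auth=AUTH)
resp.raise_for_status()
print("launched job", resp.json()["id"])
```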
@@ -5,6 +5,7 @@
 import dateutil
 import functools
 import html
+import itertools
 import logging
 import re
 import requests
@@ -20,9 +21,10 @@ from urllib3.exceptions import ConnectTimeoutError
 # Django
 from django.conf import settings
 from django.core.exceptions import FieldError, ObjectDoesNotExist
-from django.db.models import Q, Sum
+from django.db.models import Q, Sum, Count
 from django.db import IntegrityError, ProgrammingError, transaction, connection
 from django.db.models.fields.related import ManyToManyField, ForeignKey
+from django.db.models.functions import Trunc
 from django.shortcuts import get_object_or_404
 from django.utils.safestring import mark_safe
 from django.utils.timezone import now
@@ -47,9 +49,6 @@ from rest_framework import status
 from rest_framework_yaml.parsers import YAMLParser
 from rest_framework_yaml.renderers import YAMLRenderer
 
-# QSStats
-import qsstats
-
 # ANSIConv
 import ansiconv
@@ -283,30 +282,50 @@ class DashboardJobsGraphView(APIView):
             success_query = success_query.filter(instance_of=models.ProjectUpdate)
             failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
 
-        success_qss = qsstats.QuerySetStats(success_query, 'finished')
-        failed_qss = qsstats.QuerySetStats(failed_query, 'finished')
-
-        start_date = now()
+        end = now()
+        interval = 'day'
         if period == 'month':
-            end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(months=1)
         elif period == 'two_weeks':
-            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(weeks=2)
         elif period == 'week':
-            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(weeks=1)
         elif period == 'day':
-            end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
-            interval = 'hours'
+            start = end - dateutil.relativedelta.relativedelta(days=1)
+            interval = 'hour'
         else:
            return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
 
         dashboard_data = {"jobs": {"successful": [], "failed": []}}
-        for element in success_qss.time_series(end_date, start_date, interval=interval):
-            dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()), element[1]])
-        for element in failed_qss.time_series(end_date, start_date, interval=interval):
-            dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()), element[1]])
+        succ_list = dashboard_data['jobs']['successful']
+        fail_list = dashboard_data['jobs']['failed']
+
+        qs_s = (
+            success_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_s = {item['d']: item['agg'] for item in qs_s}
+        qs_f = (
+            failed_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_f = {item['d']: item['agg'] for item in qs_f}
+
+        start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
+        for d in itertools.count():
+            date = start_date + dateutil.relativedelta.relativedelta(days=d)
+            if date > end:
+                break
+            succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
+            fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
 
         return Response(dashboard_data)
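The hunk above replaces the qsstats time series with plain ORM aggregation. A condensed, standalone sketch of the same Trunc/Count bucketing pattern, written against a hypothetical queryset whose model has a `finished` DateTimeField (names are illustrative only, not the view code):

```python
# Standalone sketch of the Trunc + Count bucketing pattern used above.
from dateutil.relativedelta import relativedelta

from django.db.models import Count
from django.db.models.functions import Trunc
from django.utils.timezone import now


def jobs_per_day(queryset, days=30):
    end = now()
    start = end - relativedelta(days=days)
    buckets = (
        queryset.filter(finished__range=(start, end))
        .annotate(d=Trunc('finished', 'day', tzinfo=end.tzinfo))
        .order_by()  # clear default ordering so the GROUP BY is only on `d`
        .values('d')
        .annotate(agg=Count('id', distinct=True))
    )
    counts = {row['d']: row['agg'] for row in buckets}

    # Fill days with no jobs with an explicit zero so the series is continuous.
    series = []
    day = start.replace(hour=0, minute=0, second=0, microsecond=0)
    while day <= end:
        series.append((day, counts.get(day, 0)))
        day += relativedelta(days=1)
    return series
```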
@@ -2221,6 +2240,8 @@ class InventorySourceUpdateView(RetrieveAPIView):
 
     def post(self, request, *args, **kwargs):
         obj = self.get_object()
+        serializer = self.get_serializer(instance=obj, data=request.data)
+        serializer.is_valid(raise_exception=True)
         if obj.can_update:
             update = obj.update()
             if not update:
@@ -238,7 +238,9 @@ def instance_info(since, include_hostnames=False, **kwargs):
     info = {}
     # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
     active_tasks = models.UnifiedJob.objects.filter(status__in=['running', 'waiting']).only('task_impact', 'controller_node', 'execution_node')
-    tm_instances = TaskManagerInstances(active_tasks, instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
+    tm_instances = TaskManagerInstances(
+        active_tasks, instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled', 'node_type']
+    )
     for tm_instance in tm_instances.instances_by_hostname.values():
         instance = tm_instance.obj
         instance_info = {
@@ -251,6 +253,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
             'enabled': instance.enabled,
             'consumed_capacity': tm_instance.consumed_capacity,
             'remaining_capacity': instance.capacity - tm_instance.consumed_capacity,
+            'node_type': instance.node_type,
         }
         if include_hostnames is True:
             instance_info['hostname'] = instance.hostname
@@ -57,6 +57,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -84,6 +85,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -111,6 +113,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -120,6 +123,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -180,12 +184,13 @@ def metrics():
     instance_data = instance_info(None, include_hostnames=True)
     for uuid, info in instance_data.items():
         hostname = info['hostname']
-        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
+        node_type = info['node_type']
+        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['capacity'])
         INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
         INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
-        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
-        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
-        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
+        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['consumed_capacity'])
+        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['remaining_capacity'])
+        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).info(
             {
                 'enabled': str(instance_data[uuid]['enabled']),
                 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
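These hunks add a `node_type` label to each per-instance gauge. A minimal, self-contained sketch of the resulting label scheme using prometheus_client (metric name and label values here are placeholders, not AWX's registry):

```python
# Minimal sketch of the new label scheme: every per-instance gauge now carries
# hostname, instance_uuid, and node_type.
from prometheus_client import CollectorRegistry, Gauge, generate_latest

REGISTRY = CollectorRegistry()
INSTANCE_CAPACITY = Gauge(
    'awx_instance_capacity',
    'Capacity of each node in the system',
    ['hostname', 'instance_uuid', 'node_type'],
    registry=REGISTRY,
)

# One hypothetical sample; real values come from instance_info() above.
INSTANCE_CAPACITY.labels(hostname='node1', instance_uuid='abc-123', node_type='execution').set(57)
print(generate_latest(REGISTRY).decode())
```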
@@ -5,7 +5,9 @@ import logging
 
 from django.conf import settings
 from django.apps import apps
 
 from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing
 
 root_key = 'awx_metrics'
 logger = logging.getLogger('awx.main.analytics')
@@ -163,7 +165,7 @@ class Metrics:
         Instance = apps.get_model('main', 'Instance')
         if instance_name:
             self.instance_name = instance_name
-        elif settings.IS_TESTING():
+        elif is_testing():
             self.instance_name = "awx_testing"
         else:
             self.instance_name = Instance.objects.my_hostname()
@@ -1,6 +1,5 @@
 from .plugin import CredentialPlugin, CertFiles, raise_for_status
 
-import base64
 from urllib.parse import urljoin, quote
 
 from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
     cacert = kwargs.get('cacert', None)
 
     auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain'},
+        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
         'data': api_key,
         'allow_redirects': False,
     }
@@ -69,9 +68,9 @@ def conjur_backend(**kwargs):
     with CertFiles(cacert) as cert:
         # https://www.conjur.org/api.html#authentication-authenticate-post
         auth_kwargs['verify'] = cert
-        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+        resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
     raise_for_status(resp)
-    token = base64.b64encode(resp.content).decode('utf-8')
+    token = resp.content.decode('utf-8')
 
     lookup_kwargs = {
         'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -79,9 +78,10 @@ def conjur_backend(**kwargs):
     }
 
     # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
-    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+    path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
     if version:
-        path = '?'.join([path, version])
+        ver = "version={}".format(version)
+        path = '?'.join([path, ver])
 
     with CertFiles(cacert) as cert:
         lookup_kwargs['verify'] = cert
@@ -90,4 +90,4 @@ def conjur_backend(**kwargs):
     return resp.text
 
 
-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
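Taken together, the Conjur changes above prefix both endpoints with `/api`, ask Conjur to base64-encode the authentication token instead of encoding it client-side, and pass `version` as a named query parameter. A rough sketch of the resulting request flow, with placeholder URL, account, and credentials (this is an illustration, not the plugin code):

```python
# Rough sketch of the lookup flow after the change (placeholders throughout).
from urllib.parse import urljoin, quote

import requests

url = 'https://conjur.example.com'   # placeholder appliance URL
account, username, api_key = 'myorg', 'host/awx', 'secret-api-key'
secret_path, version = 'prod/db/password', 2

auth_resp = requests.post(
    urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])),
    headers={'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
    data=api_key,
    allow_redirects=False,
)
auth_resp.raise_for_status()
token = auth_resp.content.decode('utf-8')  # already base64; no client-side encoding

lookup = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', quote(secret_path, safe='')]))
if version:
    lookup = '?'.join([lookup, 'version={}'.format(version)])
resp = requests.get(lookup, headers={'Authorization': 'Token token="{}"'.format(token)})
resp.raise_for_status()
print(resp.text)
```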
@@ -466,7 +466,7 @@ class AutoscalePool(WorkerPool):
                 task_name = 'unknown'
                 if isinstance(body, dict):
                     task_name = body.get('task')
-                logger.warn(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+                logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
             return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
@@ -1,14 +1,13 @@
 import inspect
 import logging
-import sys
 import json
 import time
 from uuid import uuid4
 
 from django.conf import settings
 from django_guid import get_guid
 
 from . import pg_bus_conn
+from awx.main.utils import is_testing
 
 logger = logging.getLogger('awx.main.dispatch')
 
@@ -93,7 +92,7 @@ class task:
             obj.update(**kw)
         if callable(queue):
             queue = queue()
-        if not settings.IS_TESTING(sys.argv):
+        if not is_testing():
             with pg_bus_conn() as conn:
                 conn.notify(queue, json.dumps(obj))
         return (obj, queue)
@@ -233,11 +233,12 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         if not isinstance(vargs.get('grace_period'), int):
             vargs['grace_period'] = 60  # grace period of 60 minutes, need to set because CLI default will not take effect
         if 'exclude_strings' not in vargs and vargs.get('file_pattern'):
-            active_pks = list(
-                UnifiedJob.objects.filter(
-                    (models.Q(execution_node=self.hostname) | models.Q(controller_node=self.hostname)) & models.Q(status__in=('running', 'waiting'))
-                ).values_list('pk', flat=True)
-            )
+            active_job_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
+            if self.node_type == 'execution':
+                active_job_qs = active_job_qs.filter(execution_node=self.hostname)
+            else:
+                active_job_qs = active_job_qs.filter(controller_node=self.hostname)
+            active_pks = list(active_job_qs.values_list('pk', flat=True))
             if active_pks:
                 vargs['exclude_strings'] = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
         if 'remove_images' in vargs or 'image_prune' in vargs:
@@ -471,6 +471,29 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
     def get_absolute_url(self, request=None):
         return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)
 
+    def get_reason_if_failed(self):
+        """
+        If the project is in a failed or errored state, return a human-readable
+        error message explaining why. Otherwise return None.
+
+        This is used during validation in the serializer and also by
+        RunProjectUpdate/RunInventoryUpdate.
+        """
+
+        if self.status not in ('error', 'failed'):
+            return None
+
+        latest_update = self.project_updates.last()
+        if latest_update is not None and latest_update.failed:
+            failed_validation_tasks = latest_update.project_update_events.filter(
+                event='runner_on_failed',
+                play="Perform project signature/checksum verification",
+            )
+            if failed_validation_tasks:
+                return _("Last project update failed due to signature validation failure.")
+
+        return _("Missing a revision to run due to failed project update.")
+
     '''
     RelatedJobsMixin
     '''
@@ -5,9 +5,6 @@ import json
 import logging
 import requests
 
 from django.utils.encoding import smart_str
 from django.utils.translation import gettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
 from awx.main.utils import get_awx_http_client_headers
 from awx.main.notifications.custom_notification_base import CustomNotificationBase
@@ -17,6 +14,8 @@ logger = logging.getLogger('awx.main.notifications.webhook_backend')
 
 class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
+    MAX_RETRIES = 5
+
     init_parameters = {
         "url": {"label": "Target URL", "type": "string"},
         "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@@ -64,20 +63,67 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
         if self.http_method.lower() not in ['put', 'post']:
             raise ValueError("HTTP method must be either 'POST' or 'PUT'.")
         chosen_method = getattr(requests, self.http_method.lower(), None)
 
         for m in messages:
 
             auth = None
             if self.username or self.password:
                 auth = (self.username, self.password)
-            r = chosen_method(
-                "{}".format(m.recipients()[0]),
-                auth=auth,
-                data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
-                headers=dict(list(get_awx_http_client_headers().items()) + list((self.headers or {}).items())),
-                verify=(not self.disable_ssl_verification),
-            )
-            if r.status_code >= 400:
-                logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
+
+            # the constructor for EmailMessage - https://docs.djangoproject.com/en/4.1/_modules/django/core/mail/message will turn an empty dictionary to an empty string
+            # sometimes an empty dict is intentional and we added this conditional to enforce that
+            if not m.body:
+                m.body = {}
+
+            url = str(m.recipients()[0])
+            data = json.dumps(m.body, ensure_ascii=False).encode('utf-8')
+            headers = {**(get_awx_http_client_headers()), **(self.headers or {})}
+
+            err = None
+
+            for retries in range(self.MAX_RETRIES):
+
+                # Sometimes we hit redirect URLs. We must account for this. We still extract the redirect URL from the response headers and try again. Max retires == 5
+                resp = chosen_method(
+                    url=url,
+                    auth=auth,
+                    data=data,
+                    headers=headers,
+                    verify=(not self.disable_ssl_verification),
+                    allow_redirects=False,  # override default behaviour for redirects
+                )
+
+                # either success or error reached if this conditional fires
+                if resp.status_code not in [301, 307]:
+                    break
+
+                # we've hit a redirect. extract the redirect URL out of the first response header and try again
+                logger.warning(
+                    f"Received a {resp.status_code} from {url}, trying to reach redirect url {resp.headers.get('Location', None)}; attempt #{retries+1}"
+                )
+
+                # take the first redirect URL in the response header and try that
+                url = resp.headers.get("Location", None)
+
+                if url is None:
+                    err = f"Webhook notification received redirect to a blank URL from {url}. Response headers={resp.headers}"
+                    break
+            else:
+                # no break condition in the loop encountered; therefore we have hit the maximum number of retries
+                err = f"Webhook notification max number of retries [{self.MAX_RETRIES}] exceeded. Failed to send webhook notification to {url}"
+
+            if resp.status_code >= 400:
+                err = f"Error sending webhook notification: {resp.status_code}"
+
+            # log error message
+            if err:
+                logger.error(err)
                 if not self.fail_silently:
-                    raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
-            sent_messages += 1
+                    raise Exception(err)
+
+            # no errors were encountered therefore we successfully sent off the notification webhook
+            if resp.status_code in range(200, 299):
+                logger.debug(f"Notification webhook successfully sent to {url}. Received {resp.status_code}")
+                sent_messages += 1
 
         return sent_messages
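The rewritten `send_messages` above caps manual redirect handling at `MAX_RETRIES`. A standalone sketch of that retry loop with placeholder names (assumptions: a plain `requests.post`, no auth or custom headers):

```python
# Standalone sketch of the redirect-aware retry loop added above.
import json
import logging

import requests

logger = logging.getLogger(__name__)
MAX_RETRIES = 5


def post_webhook(url, body):
    data = json.dumps(body).encode('utf-8')
    err = None
    for attempt in range(MAX_RETRIES):
        resp = requests.post(url, data=data, allow_redirects=False)
        if resp.status_code not in (301, 307):
            break  # success or a hard error; stop following redirects
        url = resp.headers.get('Location')
        logger.warning("redirected (attempt #%s); retrying against %s", attempt + 1, url)
        if url is None:
            err = "redirect with no Location header"
            break
    else:
        err = f"max retries [{MAX_RETRIES}] exceeded"
    if err is None and resp.status_code >= 400:
        err = f"error sending webhook: {resp.status_code}"
    if err:
        raise RuntimeError(err)
    return resp
```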
@@ -3,6 +3,8 @@
 
 from django.db.models.signals import pre_save, post_save, pre_delete, m2m_changed
 
+from taggit.managers import TaggableManager
+
 
 class ActivityStreamRegistrar(object):
     def __init__(self):
@@ -19,6 +21,8 @@ class ActivityStreamRegistrar(object):
             pre_delete.connect(activity_stream_delete, sender=model, dispatch_uid=str(self.__class__) + str(model) + "_delete")
 
             for m2mfield in model._meta.many_to_many:
+                if isinstance(m2mfield, TaggableManager):
+                    continue  # Special case for taggit app
                 try:
                     m2m_attr = getattr(model, m2mfield.name)
                     m2m_changed.connect(
@@ -39,7 +39,7 @@ from awx.main.utils import (
     ScheduleTaskManager,
     ScheduleWorkflowManager,
 )
-from awx.main.utils.common import task_manager_bulk_reschedule
+from awx.main.utils.common import task_manager_bulk_reschedule, is_testing
 from awx.main.signals import disable_activity_stream
 from awx.main.constants import ACTIVE_STATES
 from awx.main.scheduler.dependency_graph import DependencyGraph
@@ -97,7 +97,7 @@ class TaskBase:
         self.all_tasks = [t for t in qs]
 
     def record_aggregate_metrics(self, *args):
-        if not settings.IS_TESTING():
+        if not is_testing():
             # increment task_manager_schedule_calls regardless if the other
             # metrics are recorded
             s_metrics.Metrics(auto_pipe_execute=True).inc(f"{self.prefix}__schedule_calls", 1)
@@ -2,8 +2,6 @@ import json
 import time
 import logging
 from collections import deque
-import os
-import stat
 
 # Django
 from django.conf import settings
@@ -206,21 +204,6 @@ class RunnerCallback:
             self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env)
             # We opened a connection just for that save, close it here now
             connections.close_all()
-        elif status_data['status'] == 'failed':
-            # For encrypted ssh_key_data, ansible-runner worker will open and write the
-            # ssh_key_data to a named pipe. Then, once the podman container starts, ssh-agent will
-            # read from this named pipe so that the key can be used in ansible-playbook.
-            # Once the podman container exits, the named pipe is deleted.
-            # However, if the podman container fails to start in the first place, e.g. the image
-            # name is incorrect, then this pipe is not cleaned up. Eventually ansible-runner
-            # processor will attempt to write artifacts to the private data dir via unstream_dir, requiring
-            # that it open this named pipe. This leads to a hang. Thus, before any artifacts
-            # are written by the processor, it's important to remove this ssh_key_data pipe.
-            private_data_dir = self.instance.job_env.get('AWX_PRIVATE_DATA_DIR', None)
-            if private_data_dir:
-                key_data_file = os.path.join(private_data_dir, 'artifacts', str(self.instance.id), 'ssh_key_data')
-                if os.path.exists(key_data_file) and stat.S_ISFIFO(os.stat(key_data_file).st_mode):
-                    os.remove(key_data_file)
         elif status_data['status'] == 'error':
             result_traceback = status_data.get('result_traceback', None)
             if result_traceback:
@@ -767,6 +767,10 @@ class SourceControlMixin(BaseTask):
 
         try:
             original_branch = None
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                self.update_model(self.instance.pk, status='failed', job_explanation=failed_reason)
+                raise RuntimeError(failed_reason)
             project_path = project.get_project_path(check_if_exists=False)
             if project.scm_type == 'git' and (scm_branch and scm_branch != project.scm_branch):
                 if os.path.exists(project_path):
@@ -1056,10 +1060,6 @@ class RunJob(SourceControlMixin, BaseTask):
             error = _('Job could not start because no Execution Environment could be found.')
             self.update_model(job.pk, status='error', job_explanation=error)
             raise RuntimeError(error)
-        elif job.project.status in ('error', 'failed'):
-            msg = _('The project revision for this job template is unknown due to a failed update.')
-            job = self.update_model(job.pk, status='failed', job_explanation=msg)
-            raise RuntimeError(msg)
 
         if job.inventory.kind == 'smart':
             # cache smart inventory memberships so that the host_filter query is not
@@ -1,7 +1,7 @@
 import pytest
 from unittest import mock
 
-from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate
+from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate, Job
 from awx.main.models.activity_stream import ActivityStream
 from awx.main.models.ha import Instance, InstanceGroup
 from awx.main.tasks.system import apply_cluster_membership_policies
@@ -15,6 +15,24 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
     assert default_instance_group in job_factory().preferred_instance_groups
 
 
+@pytest.mark.django_db
+@pytest.mark.parametrize('node_type', ('execution', 'control'))
+@pytest.mark.parametrize('active', (True, False))
+def test_get_cleanup_task_kwargs_active_jobs(node_type, active):
+    instance = Instance.objects.create(hostname='foobar', node_type=node_type)
+    job_kwargs = dict()
+    job_kwargs['controller_node' if node_type == 'control' else 'execution_node'] = instance.hostname
+    job_kwargs['status'] = 'running' if active else 'successful'
+
+    job = Job.objects.create(**job_kwargs)
+    kwargs = instance.get_cleanup_task_kwargs()
+
+    if active:
+        assert kwargs['exclude_strings'] == [f'awx_{job.pk}_']
+    else:
+        assert 'exclude_strings' not in kwargs
+
+
 @pytest.mark.django_db
 class TestPolicyTaskScheduling:
     """Tests make assertions about when the policy task gets scheduled"""
@@ -75,6 +75,7 @@ def test_encrypted_subfields(get, post, user, organization):
     url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
     response = get(url, u)
     assert response.data['notification_configuration']['account_token'] == "$encrypted$"
 
     with mock.patch.object(notification_template_actual.notification_class, "send_messages", assert_send):
         notification_template_actual.send("Test", {'body': "Test"})
@@ -175,3 +176,46 @@ def test_custom_environment_injection(post, user, organization):
 
     fake_send.side_effect = _send_side_effect
     template.send('subject', 'message')
 
 
+def mock_post(*args, **kwargs):
+    class MockGoodResponse:
+        def __init__(self):
+            self.status_code = 200
+
+    class MockRedirectResponse:
+        def __init__(self):
+            self.status_code = 301
+            self.headers = {"Location": "http://goodendpoint"}
+
+    if kwargs['url'] == "http://goodendpoint":
+        return MockGoodResponse()
+    else:
+        return MockRedirectResponse()
+
+
+@pytest.mark.django_db
+@mock.patch('requests.post', side_effect=mock_post)
+def test_webhook_notification_pointed_to_a_redirect_launch_endpoint(post, admin, organization):
+
+    n1 = NotificationTemplate.objects.create(
+        name="test-webhook",
+        description="test webhook",
+        organization=organization,
+        notification_type="webhook",
+        notification_configuration=dict(
+            url="http://some.fake.url",
+            disable_ssl_verification=True,
+            http_method="POST",
+            headers={
+                "Content-Type": "application/json",
+            },
+            username=admin.username,
+            password=admin.password,
+        ),
+        messages={
+            "success": {"message": "", "body": "{}"},
+        },
+    )
+
+    assert n1.send("", n1.messages.get("success").get("body")) == 1
@@ -27,11 +27,12 @@ def test_send_messages_as_POST():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -57,11 +58,12 @@ def test_send_messages_as_PUT():
         ]
     )
     requests_mock.put.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body 2'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -87,11 +89,12 @@ def test_send_messages_with_username():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=('userstring', None),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -117,11 +120,12 @@ def test_send_messages_with_password():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=(None, 'passwordstring'),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -147,11 +151,12 @@ def test_send_messages_with_username_and_password():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=('userstring', 'passwordstring'),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -177,11 +182,12 @@ def test_send_messages_with_no_verify_ssl():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=False,
+        allow_redirects=False,
    )
     assert sent_messages == 1
@@ -207,7 +213,7 @@ def test_send_messages_with_additional_headers():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={
@@ -217,5 +223,6 @@ def test_send_messages_with_additional_headers():
             'X-Test-Header2': 'test-content-2',
         },
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -11,11 +11,12 @@ import os
 import subprocess
 import re
 import stat
 import sys
 import urllib.parse
 import threading
 import contextlib
 import tempfile
-from functools import reduce, wraps
+import functools
 
 # Django
 from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
@@ -73,6 +74,7 @@ __all__ = [
     'NullablePromptPseudoField',
     'model_instance_diff',
     'parse_yaml_or_json',
+    'is_testing',
     'RequireDebugTrueOrTest',
     'has_model_field_prefetched',
     'set_environ',
@@ -144,6 +146,19 @@ def underscore_to_camelcase(s):
     return ''.join(x.capitalize() or '_' for x in s.split('_'))
 
 
+@functools.cache
+def is_testing(argv=None):
+    '''Return True if running django or py.test unit tests.'''
+    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
+        return True
+    argv = sys.argv if argv is None else argv
+    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
+        return True
+    elif len(argv) >= 2 and argv[1] == 'test':
+        return True
+    return False
+
+
 class RequireDebugTrueOrTest(logging.Filter):
     """
     Logging filter to output when in DEBUG mode or running tests.
@@ -152,7 +167,7 @@ class RequireDebugTrueOrTest(logging.Filter):
     def filter(self, record):
         from django.conf import settings
 
-        return settings.DEBUG or settings.IS_TESTING()
+        return settings.DEBUG or is_testing()
 
 
 class IllegalArgumentError(ValueError):
@@ -174,7 +189,7 @@ def memoize(ttl=60, cache_key=None, track_function=False, cache=None):
     cache = cache or get_memoize_cache()
 
     def memoize_decorator(f):
-        @wraps(f)
+        @functools.wraps(f)
         def _memoizer(*args, **kwargs):
             if track_function:
                 cache_dict_key = slugify('%r %r' % (args, kwargs))
@@ -992,7 +1007,7 @@ def getattrd(obj, name, default=NoDefaultProvided):
     """
 
     try:
-        return reduce(getattr, name.split("."), obj)
+        return functools.reduce(getattr, name.split("."), obj)
     except AttributeError:
         if default != NoDefaultProvided:
             return default
@@ -1188,7 +1203,7 @@ def cleanup_new_process(func):
     Cleanup django connection, cache connection, before executing new thread or processes entry point, func.
     """
 
-    @wraps(func)
+    @functools.wraps(func)
     def wrapper_cleanup_new_process(*args, **kwargs):
         from awx.conf.settings import SettingsWrapper  # noqa
 
@@ -1202,7 +1217,7 @@ def cleanup_new_process(func):
 
 def log_excess_runtime(func_logger, cutoff=5.0):
     def log_excess_runtime_decorator(func):
-        @wraps(func)
+        @functools.wraps(func)
         def _new_func(*args, **kwargs):
             start_time = time.time()
             return_value = func(*args, **kwargs)
@@ -10,28 +10,6 @@ import socket
 from datetime import timedelta
 
 
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-
-
-def is_testing(argv=None):
-    import sys
-
-    '''Return True if running django or py.test unit tests.'''
-    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
-        return True
-    argv = sys.argv if argv is None else argv
-    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
-        return True
-    elif len(argv) >= 2 and argv[1] == 'test':
-        return True
-    return False
-
-
-def IS_TESTING(argv=None):
-    return is_testing(argv)
-
-
 if "pytest" in sys.modules:
     from unittest import mock
@@ -40,9 +18,13 @@ if "pytest" in sys.modules:
 else:
     import ldap
 
 
 DEBUG = True
 SQL_DEBUG = DEBUG
 
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+
 # FIXME: it would be nice to cycle back around and allow this to be
 # BigAutoField going forward, but we'd have to be explicit about our
 # existing models.
@@ -254,6 +236,14 @@ START_TASK_LIMIT = 100
 TASK_MANAGER_TIMEOUT = 300
 TASK_MANAGER_TIMEOUT_GRACE_PERIOD = 60
 
+# Number of seconds _in addition to_ the task manager timeout a job can stay
+# in waiting without being reaped
+JOB_WAITING_GRACE_PERIOD = 60
+
+# Number of seconds after a container group job finished time to wait
+# before the awx_k8s_reaper task will tear down the pods
+K8S_POD_REAPER_GRACE_PERIOD = 60
+
 # Disallow sending session cookies over insecure connections
 SESSION_COOKIE_SECURE = True
@@ -1004,16 +994,5 @@ DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true']
 # Mount exposed paths as hostPath resource in k8s/ocp
 AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False
 
-# Time out task managers if they take longer than this many seconds
-TASK_MANAGER_TIMEOUT = 300
-
-# Number of seconds _in addition to_ the task manager timeout a job can stay
-# in waiting without being reaped
-JOB_WAITING_GRACE_PERIOD = 60
-
-# Number of seconds after a container group job finished time to wait
-# before the awx_k8s_reaper task will tear down the pods
-K8S_POD_REAPER_GRACE_PERIOD = 60
-
 # This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
 CLUSTER_HOST_ID = socket.gethostname()
awx/ui/package-lock.json (generated, 86 lines changed)
@@ -7,9 +7,9 @@
       "name": "ui",
       "dependencies": {
         "@lingui/react": "3.14.0",
-        "@patternfly/patternfly": "4.210.2",
-        "@patternfly/react-core": "^4.239.0",
-        "@patternfly/react-icons": "4.90.0",
+        "@patternfly/patternfly": "4.217.1",
+        "@patternfly/react-core": "^4.250.1",
+        "@patternfly/react-icons": "4.92.10",
         "@patternfly/react-table": "4.108.0",
         "ace-builds": "^1.10.1",
         "ansi-to-html": "0.7.2",
@@ -3747,26 +3747,26 @@
       "dev": true
     },
     "node_modules/@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.239.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
-      "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
         "tslib": "^2.0.0"
       },
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
@@ -3775,18 +3775,18 @@
       "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
     },
     "node_modules/@patternfly/react-icons": {
-      "version": "4.90.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
-      "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.89.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
-      "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "node_modules/@patternfly/react-table": {
       "version": "4.108.0",
@@ -3811,9 +3811,9 @@
       "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.91.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
-      "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
     },
     "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -25089,18 +25089,18 @@
       "dev": true
     },
     "@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "@patternfly/react-core": {
-      "version": "4.239.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
-      "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "requires": {
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
@@ -25115,15 +25115,15 @@
       }
     },
     "@patternfly/react-icons": {
-      "version": "4.90.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
-      "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "requires": {}
     },
     "@patternfly/react-styles": {
-      "version": "4.89.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
-      "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "@patternfly/react-table": {
       "version": "4.108.0",
@@ -25146,9 +25146,9 @@
       }
     },
     "@patternfly/react-tokens": {
-      "version": "4.91.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
-      "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
     },
     "@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -7,9 +7,9 @@
   },
   "dependencies": {
     "@lingui/react": "3.14.0",
-    "@patternfly/patternfly": "4.210.2",
-    "@patternfly/react-core": "^4.239.0",
-    "@patternfly/react-icons": "4.90.0",
+    "@patternfly/patternfly": "4.217.1",
+    "@patternfly/react-core": "^4.250.1",
+    "@patternfly/react-icons": "4.92.10",
     "@patternfly/react-table": "4.108.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
@@ -282,7 +282,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -301,7 +301,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -36,14 +36,14 @@ const mockCredentialTypeDetail = {
   url: '/api/v2/credential_types/20/',
   related: {
     named_url:
-      '/api/v2/credential_types/CyberArk Conjur Secret Lookup+external/',
+      '/api/v2/credential_types/CyberArk Conjur Secrets Manager Lookup+external/',
     credentials: '/api/v2/credential_types/20/credentials/',
     activity_stream: '/api/v2/credential_types/20/activity_stream/',
   },
   summary_fields: { user_capabilities: { edit: false, delete: false } },
   created: '2020-05-18T21:53:35.398260Z',
   modified: '2020-05-18T21:54:05.451444Z',
-  name: 'CyberArk Conjur Secret Lookup',
+  name: 'CyberArk Conjur Secrets Manager Lookup',
   description: '',
   kind: 'external',
   namespace: 'conjur',
@@ -546,7 +546,7 @@
   },
   "created": "2020-05-18T21:53:35.398260Z",
   "modified": "2020-05-18T21:54:05.451444Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "kind": "external",
   "namespace": "conjur",
@@ -3,7 +3,7 @@
   "type": "credential",
   "url": "/api/v2/credentials/1/",
   "related": {
-    "named_url": "/api/v2/credentials/CyberArk Conjur Secret Lookup++CyberArk Conjur Secret Lookup+external++/",
+    "named_url": "/api/v2/credentials/CyberArk Conjur Secrets Manager Lookup+external++/",
     "created_by": "/api/v2/users/1/",
     "modified_by": "/api/v2/users/1/",
     "activity_stream": "/api/v2/credentials/1/activity_stream/",
@@ -19,7 +19,7 @@
   "summary_fields": {
     "credential_type": {
       "id": 20,
-      "name": "CyberArk Conjur Secret Lookup",
+      "name": "CyberArk Conjur Secrets Manager Lookup",
       "description": ""
     },
     "created_by": {
@@ -69,7 +69,7 @@
   },
   "created": "2020-05-19T12:51:36.956029Z",
   "modified": "2020-05-19T12:51:36.956086Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "organization": null,
   "credential_type": 20,
@@ -70,7 +70,6 @@ const getStdOutValue = (hostEvent) => {
 function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
   const [hostStatus, setHostStatus] = useState(null);
   const [activeTabKey, setActiveTabKey] = useState(0);
 
   useEffect(() => {
     setHostStatus(processEventStatus(hostEvent));
   }, [setHostStatus, hostEvent]);
@@ -108,11 +107,11 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
           style={{ alignItems: 'center', marginTop: '20px' }}
           gutter="sm"
         >
-          <Detail label={t`Host`} value={hostEvent.host_name} />
-          {hostEvent.summary_fields.host?.description ? (
+          <Detail label={t`Host`} value={hostEvent.event_data?.host} />
+          {hostEvent.summary_fields?.host?.description ? (
             <Detail
               label={t`Description`}
-              value={hostEvent.summary_fields.host.description}
+              value={hostEvent.summary_fields?.host?.description}
             />
           ) : null}
           {hostStatus ? (
@@ -125,12 +124,9 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
           <Detail label={t`Task`} value={hostEvent.task} />
           <Detail
             label={t`Module`}
-            value={hostEvent.event_data.task_action || t`No result found`}
-          />
-          <Detail
-            label={t`Command`}
-            value={hostEvent?.event_data?.res?.cmd}
+            value={hostEvent.event_data?.task_action || t`No result found`}
           />
+          <Detail label={t`Command`} value={hostEvent.event_data?.res?.cmd} />
         </DetailList>
       </Tab>
       <Tab
@@ -52,6 +52,47 @@ const hostEvent = {
   },
 };
 
+const partialHostEvent = {
+  changed: true,
+  event: 'runner_on_ok',
+  event_data: {
+    host: 'foo',
+    play: 'all',
+    playbook: 'run_command.yml',
+    res: {
+      ansible_loop_var: 'item',
+      changed: true,
+      item: '1',
+      msg: 'This is a debug message: 1',
+      stdout:
+        ' total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023',
+      stderr: 'problems',
+      cmd: ['free', '-m'],
+      stderr_lines: [],
+      stdout_lines: [
+        ' total used free shared buff/cache available',
+        'Mem: 7973 3005 960 30 4007 4582',
+        'Swap: 1023 0 1023',
+      ],
+    },
+    task: 'command',
+    task_action: 'command',
+  },
+  event_display: 'Host OK',
+  event_level: 3,
+  failed: false,
+  host: 1,
+  id: 123,
+  job: 4,
+  play: 'all',
+  playbook: 'run_command.yml',
+  stdout: `stdout: "[0;33mchanged: [localhost] => {"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}[0m"
+`,
+  task: 'command',
+  type: 'job_event',
+  url: '/api/v2/job_events/123/',
+};
+
 /*
   Some libraries return a list of string in stdout
   Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
@@ -134,6 +175,13 @@ describe('HostEventModal', () => {
     expect(wrapper).toHaveLength(1);
   });
 
+  test('renders successfully with partial data', () => {
+    const wrapper = shallow(
+      <HostEventModal hostEvent={partialHostEvent} onClose={() => {}} />
+    );
+    expect(wrapper).toHaveLength(1);
+  });
+
   test('should render all tabs', () => {
     const wrapper = shallow(
       <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
@@ -52,7 +52,7 @@ options:
       - The credential type being created.
       - Can be a built-in credential type such as "Machine", or a custom credential type such as "My Credential Type"
       - Choices include Amazon Web Services, Ansible Galaxy/Automation Hub API Token, Centrify Vault Credential Provider Lookup,
-        Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secret Lookup, Google Compute Engine,
+        Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secrets Manager Lookup, Google Compute Engine,
         GitHub Personal Access Token, GitLab Personal Access Token, GPG Public Key, HashiCorp Vault Secret Lookup, HashiCorp Vault Signed SSH,
         Insights, Machine, Microsoft Azure Key Vault, Microsoft Azure Resource Manager, Network, OpenShift or Kubernetes API
         Bearer Token, OpenStack, Red Hat Ansible Automation Platform, Red Hat Satellite 6, Red Hat Virtualization, Source Control,
@@ -52,6 +52,7 @@ html_static_path = ['_static']

rst_epilog = '''
.. |prog| replace:: awx
.. |at| replace:: Ansible Tower
.. |RHAT| replace:: Red Hat Ansible Tower
.. |at| replace:: automation controller
.. |At| replace:: Automation controller
.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
'''

@@ -197,8 +197,10 @@ def parse_resource(client, skip_deprecated=False):

if hasattr(client, 'v2'):
for k in client.v2.json.keys():
if k in ('dashboard',):
# the Dashboard API is deprecated and not supported
if k in ('dashboard', 'config'):
# - the Dashboard API is deprecated and not supported
# - the Config command is already dealt with by the
# CustomCommand section above
continue

# argparse aliases are *only* supported in Python3 (not 2.7)

@@ -1,24 +0,0 @@
Copyright (c) 2010, Matt Croydon, Mikhail Korobov
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the tastypie nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL MATT CROYDON BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -1,25 +1,22 @@
# Dependency Management

The `requirements.txt` file is generated from `requirements.in`, using `pip-tools` `pip-compile`.
The `requirements.txt` file is generated from `requirements.in` and `requirements_git.txt`, using `pip-tools` and `pip-compile`.

## How To Use

Commands should be run from inside the `./requirements` directory of the awx repository.
Commands should be run in the awx container from inside the `./requirements` directory of the awx repository.

### Upgrading or Adding Select Libraries

If you need to add or upgrade one targeted library, then modify `requirements.in`,
then run the script:

`./updater.sh`

NOTE: `./updater.sh` uses /usr/bin/python3.6, to match the current python version
(3.6) used to build releases.
`./updater.sh run`

#### Upgrading Unpinned Dependency

If you require a new version of a dependency that does not have a pinned version
for a fix or feature, pin a minimum version and run `./updater.sh`. For example,
for a fix or feature, pin a minimum version in `requirements.in` and run `./updater.sh run`. For example,
replace the line `asgi-amqp` with `asgi-amqp>=1.1.4`, and consider leaving a
note.

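Putting the steps above together, the targeted-upgrade workflow amounts to roughly the following; this is a minimal sketch based on this README and the updater script's own checks, assuming the `/awx_devel` mount point used by the development container:

```sh
# Run inside the awx development container, from the requirements directory of the checkout
cd /awx_devel/requirements

# Edit requirements.in to pin a minimum version, e.g. change:
#   asgi-amqp
# to:
#   asgi-amqp>=1.1.4

# Regenerate requirements.txt; only the pins you changed are bumped
./updater.sh run
```
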
@@ -19,7 +19,6 @@ django-guid==3.2.1
django-oauth-toolkit==1.4.1
django-polymorphic
django-pglocks
django-qsstats-magic
django-redis
django-solo
django-split-settings

@@ -115,9 +115,6 @@ django-pglocks==1.0.4
# via -r /awx_devel/requirements/requirements.in
django-polymorphic==3.1.0
# via -r /awx_devel/requirements/requirements.in
django-qsstats-magic==1.1.0
# via -r /awx_devel/requirements/requirements.in
# via -r /awx_devel/requirements/requirements_git.txt
django-redis==4.5.0
# via -r /awx_devel/requirements/requirements.in
django-solo==2.0.0

@@ -33,11 +33,47 @@ generate_requirements() {

main() {
base_dir=$(pwd)
_tmp="$(mktemp -d --suffix .awx-requirements XXXX -p /tmp)"

_tmp=$(python -c "import tempfile; print(tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp'))")

trap _cleanup INT TERM EXIT

if [ "$1" = "upgrade" ]; then
case $1 in
"run")
NEEDS_HELP=0
;;
"upgrade")
NEEDS_HELP=0
pip_compile="${pip_compile} --upgrade"
;;
"help")
NEEDS_HELP=1
;;
*)
echo ""
echo "ERROR: Parameter $1 not valid"
echo ""
NEEDS_HELP=1
;;
esac

if [[ "$NEEDS_HELP" == "1" ]] ; then
echo "This script generates requirements.txt from requirements.in and requirements_git.in"
echo "It should be run from within the awx container"
echo ""
echo "Usage: $0 [run|upgrade]"
echo ""
echo "Commands:"
echo "help Print this message"
echo "run Run the process only upgrading pinned libraries from requirements.in"
echo "upgrade Upgrade all libraries to latest while respecting pinnings"
echo ""
exit
fi

if [[ ! -d /awx_devel ]] ; then
echo "This script should be run inside the awx container"
exit
fi

cp -vf requirements.txt "${_tmp}"

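Based on the case statement and help text above, the script's entry points can be summarised as follows (a usage sketch, not part of the diff; run from the `./requirements` directory inside the awx container):

```sh
./updater.sh help     # print the usage text
./updater.sh run      # regenerate requirements.txt, honouring pins from requirements.in
./updater.sh upgrade  # additionally pass --upgrade to pip-compile to bump all libraries
```
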
@@ -28,7 +28,7 @@ Here are the main `make` targets:
Notable files:

- `tools/docker-compose/inventory` file - used to configure the AWX development environment.
- `migrate.yml` - playbook for migrating data from Local Docker to the Development Environment
- `tools/docker-compose/ansible/migrate.yml` - playbook for migrating data from Local Docker to the Development Environment

### Prerequisites