Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)

Compare commits: feature_us...22.3.0 (25 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 84f67c7f82 | |
| | 433c28caa8 | |
| | fa05f55512 | |
| | 0d5c0bcb91 | |
| | f3fa75d832 | |
| | 285b7b0e5f | |
| | 08e8147374 | |
| | 09bd398a9e | |
| | 8d6f50fae8 | |
| | ecfbcb641e | |
| | e434b1e0f3 | |
| | 66c3acf777 | |
| | ed1983bd8c | |
| | 5c4277958c | |
| | 7e4da7efa2 | |
| | 7b1cb281c2 | |
| | dee39f3f1c | |
| | ba7f97f84b | |
| | 85e7189ee3 | |
| | 06430741ab | |
| | cf091d7836 | |
| | a66acd87e6 | |
| | 595b4e3876 | |
| | 74c46568c1 | |
| | f1196fc019 | |
Makefile (2 changes)
@@ -660,10 +660,12 @@ awx-kube-dev-build: Dockerfile.kube-dev
 ## generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
+	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
 
 ## generate UI .po files for each locale (will update translated strings for `en`)
 po: $(UI_BUILD_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
+	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
 
 ## generate API django .pot .po
 messages:
@@ -347,7 +347,7 @@ class FieldLookupBackend(BaseFilterBackend):
             args.append(Q(**{k: v}))
         for role_name in role_filters:
             if not hasattr(queryset.model, 'accessible_pk_qs'):
-                raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
+                raise ParseError(_('Cannot apply role_level filter to this list because its model does not use roles for access control.'))
             args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
         if or_filters:
             q = Q()
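Nearly every hunk in this compare follows the same mechanical pattern: two adjacent Python string literals, previously split for line length, are merged into one literal. A minimal sketch of why this is behavior-preserving (the message text is taken from the hunk above; the variable names are illustrative):

```python
# Adjacent string literals are concatenated at compile time, so the split and
# merged forms build the identical string; only the source formatting changes.
split_form = ('Cannot apply role_level filter to this list because its model '
              'does not use roles for access control.')
merged_form = 'Cannot apply role_level filter to this list because its model does not use roles for access control.'
assert split_form == merged_form
```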
@@ -169,7 +169,7 @@ class APIView(views.APIView):
             self.__init_request_error__ = exc
         except UnsupportedMediaType as exc:
             exc.detail = _(
-                'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
+                'You did not use correct Content-Type in your HTTP request. If you are using our REST API, the Content-Type must be application/json'
             )
             self.__init_request_error__ = exc
         return drf_request
@@ -71,7 +71,7 @@ class Metadata(metadata.SimpleMetadata):
         'url': _('URL for this {}.'),
         'related': _('Data structure with URLs of related resources.'),
         'summary_fields': _(
-            'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
+            'Data structure with name/description for related resources. The output for some objects may be limited for performance reasons.'
         ),
         'created': _('Timestamp when this {} was created.'),
         'modified': _('Timestamp when this {} was last modified.'),
@@ -220,7 +220,7 @@ class CopySerializer(serializers.Serializer):
         view = self.context.get('view', None)
         obj = view.get_object()
         if name == obj.name:
-            raise serializers.ValidationError(_('The original object is already named {}, a copy from' ' it cannot have the same name.'.format(name)))
+            raise serializers.ValidationError(_('The original object is already named {}, a copy from it cannot have the same name.'.format(name)))
         return attrs
@@ -760,7 +760,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
 class UnifiedJobSerializer(BaseSerializer):
     show_capabilities = ['start', 'delete']
     event_processing_finished = serializers.BooleanField(
-        help_text=_('Indicates whether all of the events generated by this ' 'unified job have been saved to the database.'), read_only=True
+        help_text=_('Indicates whether all of the events generated by this unified job have been saved to the database.'), read_only=True
     )

     class Meta:
@@ -1579,7 +1579,7 @@ class ProjectPlaybooksSerializer(ProjectSerializer):


 class ProjectInventoriesSerializer(ProjectSerializer):
-    inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))
+    inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, not comprehensive.'))

     class Meta:
         model = Project
@@ -2905,7 +2905,7 @@ class CredentialSerializer(BaseSerializer):
         ):
             if getattr(self.instance, related_objects).count() > 0:
                 raise ValidationError(
-                    _('You cannot change the credential type of the credential, as it may break the functionality' ' of the resources using it.')
+                    _('You cannot change the credential type of the credential, as it may break the functionality of the resources using it.')
                 )

         return credential_type
@@ -2925,7 +2925,7 @@ class CredentialSerializerCreate(CredentialSerializer):
         default=None,
         write_only=True,
         allow_null=True,
-        help_text=_('Write-only field used to add user to owner role. If provided, ' 'do not give either team or organization. Only valid for creation.'),
+        help_text=_('Write-only field used to add user to owner role. If provided, do not give either team or organization. Only valid for creation.'),
     )
     team = serializers.PrimaryKeyRelatedField(
         queryset=Team.objects.all(),
@@ -2933,14 +2933,14 @@ class CredentialSerializerCreate(CredentialSerializer):
         default=None,
         write_only=True,
         allow_null=True,
-        help_text=_('Write-only field used to add team to owner role. If provided, ' 'do not give either user or organization. Only valid for creation.'),
+        help_text=_('Write-only field used to add team to owner role. If provided, do not give either user or organization. Only valid for creation.'),
     )
     organization = serializers.PrimaryKeyRelatedField(
         queryset=Organization.objects.all(),
         required=False,
         default=None,
         allow_null=True,
-        help_text=_('Inherit permissions from organization roles. If provided on creation, ' 'do not give either user or team.'),
+        help_text=_('Inherit permissions from organization roles. If provided on creation, do not give either user or team.'),
     )

     class Meta:
@@ -2962,7 +2962,7 @@ class CredentialSerializerCreate(CredentialSerializer):
         if len(owner_fields) > 1:
             received = ", ".join(sorted(owner_fields))
             raise serializers.ValidationError(
-                {"detail": _("Only one of 'user', 'team', or 'organization' should be provided, " "received {} fields.".format(received))}
+                {"detail": _("Only one of 'user', 'team', or 'organization' should be provided, received {} fields.".format(received))}
             )

         if attrs.get('team'):
@@ -3622,7 +3622,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
         try:
             return obj.result_stdout
         except StdoutMaxBytesExceeded as e:
-            return _("Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes.").format(
+            return _("Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes.").format(
                 text_size=e.total, supported_size=e.supported
             )
@@ -4536,7 +4536,7 @@ class JobLaunchSerializer(BaseSerializer):
             if cred.unique_hash() in provided_mapping.keys():
                 continue  # User replaced credential with new of same type
             errors.setdefault('credentials', []).append(
-                _('Removing {} credential at launch time without replacement is not supported. ' 'Provided list lacked credential(s): {}.').format(
+                _('Removing {} credential at launch time without replacement is not supported. Provided list lacked credential(s): {}.').format(
                     cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])
                 )
             )
@@ -5019,7 +5019,7 @@ class NotificationTemplateSerializer(BaseSerializer):
         for subevent in event_messages:
             if subevent not in ('running', 'approved', 'timed_out', 'denied'):
                 error_list.append(
-                    _("Workflow Approval event '{}' invalid, must be one of " "'running', 'approved', 'timed_out', or 'denied'").format(subevent)
+                    _("Workflow Approval event '{}' invalid, must be one of 'running', 'approved', 'timed_out', or 'denied'").format(subevent)
                 )
                 continue
             subevent_messages = event_messages[subevent]
@@ -5559,7 +5559,7 @@ class InstanceGroupSerializer(BaseSerializer):
     instances = serializers.SerializerMethodField()
     is_container_group = serializers.BooleanField(
         required=False,
-        help_text=_('Indicates whether instances in this group are containerized.' 'Containerized groups have a designated Openshift or Kubernetes cluster.'),
+        help_text=_('Indicates whether instances in this group are containerized.Containerized groups have a designated Openshift or Kubernetes cluster.'),
     )
     # NOTE: help_text is duplicated from field definitions, no obvious way of
     # both defining field details here and also getting the field's help_text
@@ -5570,7 +5570,7 @@ class InstanceGroupSerializer(BaseSerializer):
         required=False,
         initial=0,
         label=_('Policy Instance Percentage'),
-        help_text=_("Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online."),
+        help_text=_("Minimum percentage of all instances that will be automatically assigned to this group when new instances come online."),
     )
     policy_instance_minimum = serializers.IntegerField(
         default=0,
@@ -5578,7 +5578,7 @@ class InstanceGroupSerializer(BaseSerializer):
         required=False,
         initial=0,
         label=_('Policy Instance Minimum'),
-        help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
+        help_text=_("Static minimum number of Instances that will be automatically assign to this group when new instances come online."),
     )
     max_concurrent_jobs = serializers.IntegerField(
         default=0,
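One side effect worth noting: the merge is purely textual, so any typo in the split literals survives. In the is_container_group help text above, the first literal ends without a trailing space, which fuses the two sentences together. A quick check, with the literal text copied from the hunk:

```python
# The split form already lacked a space after the period, so the merged literal
# inherits the fused "containerized.Containerized" wording unchanged.
split_form = 'Indicates whether instances in this group are containerized.' 'Containerized groups have a designated Openshift or Kubernetes cluster.'
merged_form = 'Indicates whether instances in this group are containerized.Containerized groups have a designated Openshift or Kubernetes cluster.'
assert split_form == merged_form
```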
@@ -565,7 +565,7 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
         if self.relationship not in ask_mapping:
             return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
         elif sub.passwords_needed:
-            return {"msg": _("Credential that requires user input on launch " "cannot be used in saved launch configuration.")}
+            return {"msg": _("Credential that requires user input on launch cannot be used in saved launch configuration.")}

         ask_field_name = ask_mapping[self.relationship]
@@ -2501,7 +2501,7 @@ class JobTemplateSurveySpec(GenericAPIView):
             return Response(
                 dict(
                     error=_(
-                        "$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
+                        "$encrypted$ is a reserved keyword for password question defaults, survey question {idx} is type {survey_item[type]}."
                     ).format(**context)
                 ),
                 status=status.HTTP_400_BAD_REQUEST,
@@ -3333,7 +3333,6 @@ class JobLabelList(SubListAPIView):
     serializer_class = serializers.LabelSerializer
     parent_model = models.Job
     relationship = 'labels'
-    parent_key = 'job'


 class WorkflowJobLabelList(JobLabelList):
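The hunk header's line counts (7 old, 6 new) indicate exactly one line was dropped, and parent_key = 'job' is the natural candidate: a hard-coded parent key would prevent the sublist view from being reused by the subclass this compare introduces. A sketch of what that subclass presumably looks like (the parent_model override is an assumption; the captured diff truncates the class body):

```python
# Hypothetical body for the subclass shown above: reuse JobLabelList's
# serializer and relationship, swapping only the parent model.
class WorkflowJobLabelList(JobLabelList):
    parent_model = models.WorkflowJob  # assumption: not shown in the captured diff
```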
@@ -4056,7 +4055,7 @@ class UnifiedJobStdout(RetrieveAPIView):
             return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
         except models.StdoutMaxBytesExceeded as e:
             response_message = _(
-                "Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes."
+                "Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes."
             ).format(text_size=e.total, supported_size=e.supported)
             if request.accepted_renderer.format == 'json':
                 return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})
@@ -50,7 +50,7 @@ class UnifiedJobDeletionMixin(object):
                 return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
             else:
                 # if it has been > 1 minute, events are probably lost
-                logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
+                logger.warning('Allowing deletion of {} through the API without all events processed.'.format(obj.log_format))

         # Manually cascade delete events if unpartitioned job
         if obj.has_unpartitioned_events:
@@ -114,7 +114,7 @@ class WebhookReceiverBase(APIView):
         # Ensure that the full contents of the request are captured for multiple uses.
         request.body

-        logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
+        logger.debug("headers: {}\ndata: {}\n".format(request.headers, request.data))
         obj = self.get_object()
         self.check_signature(obj)
@@ -35,7 +35,7 @@ class TestStringListBooleanField:
         field = StringListBooleanField()
         with pytest.raises(ValidationError) as e:
             field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))

     @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
     def test_to_representation_valid(self, value_in, value_known):
@@ -48,7 +48,7 @@ class TestStringListBooleanField:
         field = StringListBooleanField()
         with pytest.raises(ValidationError) as e:
             field.to_representation(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))


 class TestListTuplesField:
@@ -67,7 +67,7 @@ class TestListTuplesField:
         field = ListTuplesField()
         with pytest.raises(ValidationError) as e:
             field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
+        assert e.value.detail[0] == "Expected a list of tuples of max length 2 but got {} instead.".format(t)


 class TestStringListPathField:
@@ -2234,7 +2234,7 @@ class WorkflowJobAccess(BaseAccess):
             if not node_access.can_add({'reference_obj': node}):
                 wj_add_perm = False
         if not wj_add_perm and self.save_messages:
-            self.messages['workflow_job_template'] = _('You do not have permission to the workflow job ' 'resources required for relaunch.')
+            self.messages['workflow_job_template'] = _('You do not have permission to the workflow job resources required for relaunch.')
         return wj_add_perm

     def can_cancel(self, obj):
@@ -87,7 +87,7 @@ class RecordedQueryLog(object):
         )
         log.commit()
         log.execute(
-            'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
+            'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) VALUES (?, ?, ?, ?, ?, ?, ?);',
            (os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
        )
        log.commit()
@@ -800,7 +800,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
     def validate_env_var_allowed(self, env_var):
         if env_var.startswith('ANSIBLE_'):
             raise django_exceptions.ValidationError(
-                _('Environment variable {} may affect Ansible configuration so its ' 'use is not allowed in credentials.').format(env_var),
+                _('Environment variable {} may affect Ansible configuration so its use is not allowed in credentials.').format(env_var),
                 code='invalid',
                 params={'value': env_var},
             )
@@ -23,7 +23,7 @@ class Command(BaseCommand):

     def add_arguments(self, parser):
         parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove activity stream events more than N days old')
-        parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
+        parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')

     def init_logging(self):
         log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
@@ -152,7 +152,7 @@ class Command(BaseCommand):

     def add_arguments(self, parser):
         parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
-        parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
+        parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')
         parser.add_argument('--jobs', dest='only_jobs', action='store_true', default=False, help='Remove jobs')
         parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands', action='store_true', default=False, help='Remove ad hoc commands')
         parser.add_argument('--project-updates', dest='only_project_updates', action='store_true', default=False, help='Remove project updates')
@@ -44,7 +44,7 @@ class Command(BaseCommand):
             '- To list all (now deprecated) custom virtual environments run:',
             'awx-manage list_custom_venvs',
             '',
-            '- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
+            '- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
             'awx-manage export_custom_venv /path/to/venv',
             '',
             '- Run these commands with `-q` to remove tool tips.',
@@ -13,7 +13,7 @@ class Command(BaseCommand):
     Deprovision a cluster node
     """

-    help = 'Remove instance from the database. ' 'Specify `--hostname` to use this command.'
+    help = 'Remove instance from the database. Specify `--hostname` to use this command.'

     def add_arguments(self, parser):
         parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')
@@ -22,7 +22,7 @@ class Command(BaseCommand):
             '# Discovered Virtual Environments:',
             '\n'.join(venvs),
             '',
-            '- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
+            '- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
             'awx-manage export_custom_venv /path/to/venv',
             '',
             '- To view the connections a (deprecated) virtual environment had in the database, run the following command while supplying the path as an argument:',
@@ -122,7 +122,7 @@ class URLModificationMiddleware(MiddlewareMixin):
         field_class=fields.DictField,
         read_only=True,
         label=_('Formats of all available named urls'),
-        help_text=_('Read-only list of key-value pairs that shows the standard format of all ' 'available named URLs.'),
+        help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
         category=_('Named URL'),
         category_slug='named-url',
     )
@@ -12,22 +12,17 @@ def migrate_event_data(apps, schema_editor):
     # https://www.postgresql.org/docs/9.1/datatype-numeric.html)
     for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
         with connection.cursor() as cursor:
-            # rename the current event table
-            cursor.execute(f'ALTER TABLE {tblname} RENAME TO _old_{tblname};')
-            # create a *new* table with the same schema
-            cursor.execute(f'CREATE TABLE {tblname} (LIKE _old_{tblname} INCLUDING ALL);')
-            # alter the *new* table so that the primary key is a big int
+            # This loop used to do roughly the following:
+            # Rename the table to _old_<tablename>
+            # Create a new table form the old table (it would have no rows)
+            # Drop the old sequnce and create a new on tied to the new table and set the sequence to the last number from the old table
+            # This used to work with postgres spitting out a NOTICE and DETAIL
+            # With the django 4.2 upgrade that changed to an ERROR and HINT
+            # By the time we hit the 4.2 upgrade, no one should be upgrading a database this old directly to this new schema
+            # So we no longer really care about having to do all of this work, we only need a table with a bigint ID field
+            # And this can be achieved by just changing the id column type...
             cursor.execute(f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;')
-
-            # recreate counter for the new table's primary key to
-            # start where the *old* table left off (we have to do this because the
-            # counter changed from an int to a bigint)
-            cursor.execute(f'DROP SEQUENCE IF EXISTS "{tblname}_id_seq" CASCADE;')
-            cursor.execute(f'CREATE SEQUENCE "{tblname}_id_seq";')
-            cursor.execute(f'ALTER TABLE "{tblname}" ALTER COLUMN "id" ' f"SET DEFAULT nextval('{tblname}_id_seq');")
-            cursor.execute(f"SELECT setval('{tblname}_id_seq', (SELECT MAX(id) FROM _old_{tblname}), true);")
-            cursor.execute(f'DROP TABLE _old_{tblname};')


 class FakeAlterField(migrations.AlterField):
     def database_forwards(self, *args):
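Condensed, the comment block amounts to this: the old per-table dance (rename, recreate via LIKE, rebuild and reposition the sequence, drop the renamed original) is replaced by a single in-place type change. A sketch in the migration's own style, with one table name hard-coded for illustration where the real loop iterates over all five event tables:

```python
# Old approach (per table): rename, recreate with LIKE, rebuild the id sequence,
# copy the counter position, drop the renamed original.
# New approach: widen the primary key column in place.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute('ALTER TABLE main_jobevent ALTER COLUMN id TYPE bigint USING id::bigint;')
```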
awx/main/migrations/0183_pre_django_upgrade.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+# Generated by Django 3.2.16 on 2023-04-21 14:15
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        migrations.swappable_dependency(settings.OAUTH2_PROVIDER_ID_TOKEN_MODEL),
+        ('main', '0182_constructed_inventory'),
+        ('oauth2_provider', '0005_auto_20211222_2352'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='oauth2accesstoken',
+            name='id_token',
+            field=models.OneToOneField(
+                blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='access_token', to=settings.OAUTH2_PROVIDER_ID_TOKEN_MODEL
+            ),
+        ),
+        migrations.AddField(
+            model_name='oauth2application',
+            name='algorithm',
+            field=models.CharField(
+                blank=True, choices=[('', 'No OIDC support'), ('RS256', 'RSA with SHA-2 256'), ('HS256', 'HMAC with SHA-2 256')], default='', max_length=5
+            ),
+        ),
+    ]
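This migration adds the two columns that newer django-oauth-toolkit releases expect on the access-token and application models: an OIDC id_token link and a signing-algorithm choice. One detail worth noting is swappable_dependency, which turns a swappable-model setting into a migration dependency; a rough sketch of what it resolves to (the 'oauth2_provider.IDToken' value is illustrative):

```python
# swappable_dependency('app_label.ModelName') resolves to a dependency on the
# first migration of that app, e.g. ('oauth2_provider', '__first__').
from django.db import migrations

dep = migrations.swappable_dependency('oauth2_provider.IDToken')
assert tuple(dep) == ('oauth2_provider', '__first__')
```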
awx/main/migrations/0184_django_indexes.py (new file, 1062 lines)
File diff suppressed because it is too large
@@ -8,7 +8,7 @@ logger = logging.getLogger('awx.main.migrations')
 def migrate_org_admin_to_use(apps, schema_editor):
     logger.info('Initiated migration from Org admin to use role')
     roles_added = 0
-    for org in Organization.objects.prefetch_related('admin_role__members').iterator():
+    for org in Organization.objects.prefetch_related('admin_role__members').iterator(chunk_size=1000):
         igs = list(org.instance_groups.all())
         if not igs:
             continue
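The chunk_size change tracks a Django 4.1 behavior change: iterator() now supports prefetch_related() as long as an explicit chunk_size is given, and calling it on a prefetching queryset without one is deprecated (it becomes an error in Django 5.0; older Django silently skipped the prefetch). A minimal sketch of the rule, assuming a model with a prefetchable relation:

```python
# Django >= 4.1: combine prefetch_related() with iterator() only when passing
# chunk_size; prefetching is then performed per chunk of rows.
qs = Organization.objects.prefetch_related('admin_role__members')
for org in qs.iterator(chunk_size=1000):  # omitting chunk_size here is deprecated
    pass
```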
@@ -158,7 +158,7 @@ class ec2(PluginFileInjector):
         return {
             # vars that change
             'ec2_block_devices': (
-                "dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings " "| map(attribute='ebs.volume_id') | list))"
+                "dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings | map(attribute='ebs.volume_id') | list))"
             ),
             'ec2_dns_name': 'public_dns_name',
             'ec2_group_name': 'placement.group_name',
@@ -635,7 +635,7 @@ class satellite6(PluginFileInjector):
             "environment": {
                 "prefix": "{}environment_".format(group_prefix),
                 "separator": "",
-                "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
+                "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
             },
             "location": {
                 "prefix": "{}location_".format(group_prefix),
@@ -656,7 +656,7 @@ class satellite6(PluginFileInjector):
             "content_view": {
                 "prefix": "{}content_view_".format(group_prefix),
                 "separator": "",
-                "key": "foreman['content_facet_attributes']['content_view_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
+                "key": "foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
             },
         }
@@ -91,7 +91,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
         related_name='credentials',
         null=False,
         on_delete=models.CASCADE,
-        help_text=_('Specify the type of credential you want to create. Refer ' 'to the documentation for details on each type.'),
+        help_text=_('Specify the type of credential you want to create. Refer to the documentation for details on each type.'),
     )
     managed = models.BooleanField(default=False, editable=False)
     organization = models.ForeignKey(
@@ -103,7 +103,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
         related_name='credentials',
     )
     inputs = CredentialInputField(
-        blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
+        blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
     )
     admin_role = ImplicitRoleField(
         parent_role=[
@@ -195,6 +195,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):

     @cached_property
     def dynamic_input_fields(self):
+        # if the credential is not yet saved we can't access the input_sources
+        if not self.id:
+            return []
         return [obj.input_field_name for obj in self.input_sources.all()]

     def _password_field_allows_ask(self, field):
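The three added lines are a guard for unsaved credentials: until the object has a primary key, its input_sources related manager cannot be queried, so the property short-circuits to an empty list. Roughly (illustrative, not from the diff):

```python
# Sketch of the behavior the guard provides: an unsaved Credential reports no
# dynamic input fields instead of hitting the database.
cred = Credential(name='example')       # cred.id is None until save()
assert cred.dynamic_input_fields == []  # guard returns [] without a query
```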
@@ -343,12 +346,12 @@ class CredentialType(CommonModelNameNotUnique):
     managed = models.BooleanField(default=False, editable=False)
     namespace = models.CharField(max_length=1024, null=True, default=None, editable=False)
     inputs = CredentialTypeInputField(
-        blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
+        blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
     )
     injectors = CredentialTypeInjectorField(
         blank=True,
         default=dict,
-        help_text=_('Enter injectors using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.'),
+        help_text=_('Enter injectors using either JSON or YAML syntax. Refer to the documentation for example syntax.'),
     )

     @classmethod
@@ -602,9 +605,7 @@ ManagedCredentialType(
             'id': 'become_method',
             'label': gettext_noop('Privilege Escalation Method'),
             'type': 'string',
-            'help_text': gettext_noop(
-                'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.'
-            ),
+            'help_text': gettext_noop('Specify a method for "become" operations. This is equivalent to specifying the --become-method Ansible parameter.'),
         },
         {
             'id': 'become_username',
@@ -746,7 +747,7 @@ ManagedCredentialType(
             'id': 'host',
             'label': gettext_noop('Host (Authentication URL)'),
             'type': 'string',
-            'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'),
+            'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'),
         },
         {
             'id': 'project',
@@ -797,7 +798,7 @@ ManagedCredentialType(
             'id': 'host',
             'label': gettext_noop('VCenter Host'),
             'type': 'string',
-            'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'),
+            'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'),
         },
         {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
         {
@@ -822,7 +823,7 @@ ManagedCredentialType(
             'id': 'host',
             'label': gettext_noop('Satellite 6 URL'),
             'type': 'string',
-            'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'),
+            'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'),
         },
         {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
         {
@@ -847,7 +848,7 @@ ManagedCredentialType(
             'id': 'username',
             'label': gettext_noop('Service Account Email Address'),
             'type': 'string',
-            'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'),
+            'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'),
         },
         {
             'id': 'project',
@@ -867,7 +868,7 @@ ManagedCredentialType(
             'format': 'ssh_private_key',
             'secret': True,
             'multiline': True,
-            'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'),
+            'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'),
         },
     ],
     'required': ['username', 'ssh_key_data'],
@@ -885,7 +886,7 @@ ManagedCredentialType(
             'id': 'subscription',
             'label': gettext_noop('Subscription ID'),
             'type': 'string',
-            'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'),
+            'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'),
         },
         {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
         {
@@ -906,7 +907,7 @@ ManagedCredentialType(
             'id': 'cloud_environment',
             'label': gettext_noop('Azure Cloud Environment'),
             'type': 'string',
-            'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'),
+            'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'),
         },
     ],
     'required': ['subscription'],
@@ -1037,7 +1038,7 @@ ManagedCredentialType(
             'label': gettext_noop('Username'),
             'type': 'string',
             'help_text': gettext_noop(
-                'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.'
+                'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.'
             ),
         },
         {
@@ -1051,7 +1052,7 @@ ManagedCredentialType(
             'label': gettext_noop('OAuth Token'),
             'type': 'string',
             'secret': True,
-            'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'),
+            'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'),
         },
         {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
     ],
@@ -1162,7 +1163,7 @@ ManagedCredentialType(
             'id': 'auth_url',
             'label': gettext_noop('Auth Server URL'),
             'type': 'string',
-            'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'),
+            'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'),
         },
         {
             'id': 'token',
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-

 import datetime
+from datetime import timezone
 import logging
 from collections import defaultdict

@@ -10,7 +11,7 @@ from django.db import models, DatabaseError
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
-from django.utils.timezone import utc, now
+from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
 from django.utils.encoding import force_str
@@ -422,7 +423,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)

@@ -432,7 +433,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['job_created'], datetime.datetime):
                 kwargs['job_created'] = parse_datetime(kwargs['job_created'])
             if not kwargs['job_created'].tzinfo:
-                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=utc)
+                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('job_created', None)
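These swaps track a Django deprecation: django.utils.timezone.utc was just an alias of datetime.timezone.utc, is deprecated as of Django 4.1, and is removed in Django 5.0, so the stdlib object is used directly. A minimal sketch:

```python
# datetime.timezone.utc is the stdlib replacement for django.utils.timezone.utc;
# both name the same singleton UTC tzinfo object.
import datetime
from datetime import timezone

aware = datetime.datetime(2018, 1, 1).replace(tzinfo=timezone.utc)
assert aware.tzinfo is timezone.utc
```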
@@ -470,11 +471,11 @@ class JobEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('job', 'job_created', 'event'),
-            ('job', 'job_created', 'uuid'),
-            ('job', 'job_created', 'parent_uuid'),
-            ('job', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['job', 'job_created', 'event']),
+            models.Index(fields=['job', 'job_created', 'uuid']),
+            models.Index(fields=['job', 'job_created', 'parent_uuid']),
+            models.Index(fields=['job', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
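The Meta.index_together option is deprecated as of Django 4.2 in favor of Meta.indexes, which is presumably why this compare also adds the large 0184_django_indexes migration. The two spellings create equivalent composite indexes; a minimal sketch on a hypothetical model:

```python
# Hypothetical model illustrating the index_together -> indexes migration.
from django.db import models

class ExampleEvent(models.Model):
    job_id = models.IntegerField()
    counter = models.PositiveIntegerField()

    class Meta:
        app_label = 'example'  # illustrative app label
        # Old, deprecated spelling:
        #   index_together = [('job_id', 'counter')]
        indexes = [models.Index(fields=['job_id', 'counter'])]
```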
@@ -632,10 +633,10 @@ class ProjectUpdateEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('project_update', 'job_created', 'event'),
-            ('project_update', 'job_created', 'uuid'),
-            ('project_update', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['project_update', 'job_created', 'event']),
+            models.Index(fields=['project_update', 'job_created', 'uuid']),
+            models.Index(fields=['project_update', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -734,7 +735,7 @@ class BaseCommandEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)
@@ -770,10 +771,10 @@ class AdHocCommandEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('ad_hoc_command', 'job_created', 'event'),
-            ('ad_hoc_command', 'job_created', 'uuid'),
-            ('ad_hoc_command', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['ad_hoc_command', 'job_created', 'event']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'uuid']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'counter']),
         ]

     EVENT_TYPES = [
@@ -875,9 +876,9 @@ class InventoryUpdateEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('inventory_update', 'job_created', 'uuid'),
-            ('inventory_update', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['inventory_update', 'job_created', 'uuid']),
+            models.Index(fields=['inventory_update', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -920,9 +921,9 @@ class SystemJobEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('system_job', 'job_created', 'uuid'),
-            ('system_job', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['system_job', 'job_created', 'uuid']),
+            models.Index(fields=['system_job', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -106,28 +106,28 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
     has_active_failures = models.BooleanField(
         default=False,
         editable=False,
-        help_text=_('This field is deprecated and will be removed in a future release. ' 'Flag indicating whether any hosts in this inventory have failed.'),
+        help_text=_('This field is deprecated and will be removed in a future release. Flag indicating whether any hosts in this inventory have failed.'),
     )
     total_hosts = models.PositiveIntegerField(
         default=0,
         editable=False,
-        help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of hosts in this inventory.'),
+        help_text=_('This field is deprecated and will be removed in a future release. Total number of hosts in this inventory.'),
     )
     hosts_with_active_failures = models.PositiveIntegerField(
         default=0,
         editable=False,
-        help_text=_('This field is deprecated and will be removed in a future release. ' 'Number of hosts in this inventory with active failures.'),
+        help_text=_('This field is deprecated and will be removed in a future release. Number of hosts in this inventory with active failures.'),
     )
     total_groups = models.PositiveIntegerField(
         default=0,
         editable=False,
-        help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of groups in this inventory.'),
+        help_text=_('This field is deprecated and will be removed in a future release. Total number of groups in this inventory.'),
     )
     has_inventory_sources = models.BooleanField(
         default=False,
         editable=False,
         help_text=_(
-            'This field is deprecated and will be removed in a future release. ' 'Flag indicating whether this inventory has any external inventory sources.'
+            'This field is deprecated and will be removed in a future release. Flag indicating whether this inventory has any external inventory sources.'
         ),
     )
     total_inventory_sources = models.PositiveIntegerField(
@@ -424,7 +424,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
         for t in tasks:
             t.task_impact = t._get_task_impact()
         UnifiedJob.objects.bulk_update(tasks, ['task_impact'])
-        logger.debug("Finished updating inventory computed fields, pk={0}, in " "{1:.3f} seconds".format(self.pk, time.time() - start_time))
+        logger.debug("Finished updating inventory computed fields, pk={0}, in {1:.3f} seconds".format(self.pk, time.time() - start_time))

     def websocket_emit_status(self, status):
         connection.on_commit(
@@ -1055,16 +1055,16 @@ class InventorySourceOptions(BaseModel):
         # the actual inventory source being used (Amazon requires Amazon
         # credentials; Rackspace requires Rackspace credentials; etc...)
         if source.replace('ec2', 'aws') != cred.kind:
-            return _('Cloud-based inventory sources (such as %s) require ' 'credentials for the matching cloud service.') % source
+            return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source
         # Allow an EC2 source to omit the credential. If Tower is running on
         # an EC2 instance with an IAM Role assigned, boto will use credentials
         # from the instance metadata instead of those explicitly provided.
         elif source in CLOUD_PROVIDERS and source != 'ec2':
             return _('Credential is required for a cloud source.')
         elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
-            return _('Credentials of type machine, source control, insights and vault are ' 'disallowed for custom inventory sources.')
+            return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
         elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
-            return _('Credentials of type insights and vault are ' 'disallowed for scm inventory sources.')
+            return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
         return None

     def get_cloud_credential(self):
@@ -101,7 +101,7 @@ class JobOptions(BaseModel):
         max_length=1024,
         default='',
         blank=True,
-        help_text=_('Branch to use in job run. Project default used if blank. ' 'Only allowed if project allow_override field is set to true.'),
+        help_text=_('Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.'),
     )
     forks = models.PositiveIntegerField(
         blank=True,
@@ -253,7 +253,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
     job_slice_count = models.PositiveIntegerField(
         blank=True,
         default=1,
-        help_text=_("The number of jobs to slice into at runtime. " "Will cause the Job Template to launch a workflow if value is greater than 1."),
+        help_text=_("The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1."),
     )

     admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
@@ -596,12 +596,12 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     job_slice_number = models.PositiveIntegerField(
         blank=True,
         default=0,
-        help_text=_("If part of a sliced job, the ID of the inventory slice operated on. " "If not part of sliced job, parameter is not used."),
+        help_text=_("If part of a sliced job, the ID of the inventory slice operated on. If not part of sliced job, parameter is not used."),
     )
     job_slice_count = models.PositiveIntegerField(
         blank=True,
         default=1,
-        help_text=_("If ran as part of sliced jobs, the total number of slices. " "If 1, job is not part of a sliced job."),
+        help_text=_("If ran as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job."),
     )

     def _get_parent_field_name(self):
@@ -675,4 +675,4 @@ class WebhookMixin(models.Model):
         if response.status_code < 400:
             logger.debug("Webhook status update sent.")
         else:
-            logger.error("Posting webhook status failed, code: {}\n" "{}\n" "Payload sent: {}".format(response.status_code, response.text, json.dumps(data)))
+            logger.error("Posting webhook status failed, code: {}\n" "{}\nPayload sent: {}".format(response.status_code, response.text, json.dumps(data)))
@@ -74,7 +74,7 @@ class ProjectOptions(models.Model):
         return []

     local_path = models.CharField(
-        max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing ' 'playbooks and related files for this project.')
+        max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.')
     )

     scm_type = models.CharField(
@@ -276,11 +276,11 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
     scm_update_cache_timeout = models.PositiveIntegerField(
         default=0,
         blank=True,
-        help_text=_('The number of seconds after the last project update ran that a new ' 'project update will be launched as a job dependency.'),
+        help_text=_('The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
     )
     allow_override = models.BooleanField(
         default=False,
-        help_text=_('Allow changing the SCM branch or revision in a job template ' 'that uses this project.'),
+        help_text=_('Allow changing the SCM branch or revision in a job template that uses this project.'),
     )

     # credential (keys) used to validate content signature
@@ -141,7 +141,7 @@ class Role(models.Model):
         app_label = 'main'
         verbose_name_plural = _('roles')
         db_table = 'main_rbac_roles'
-        index_together = [("content_type", "object_id")]
+        indexes = [models.Index(fields=["content_type", "object_id"])]
         ordering = ("content_type", "object_id")

     role_field = models.TextField(null=False)
@@ -447,10 +447,10 @@ class RoleAncestorEntry(models.Model):
         app_label = 'main'
         verbose_name_plural = _('role_ancestors')
         db_table = 'main_rbac_role_ancestors'
-        index_together = [
-            ("ancestor", "content_type_id", "object_id"),  # used by get_roles_on_resource
-            ("ancestor", "content_type_id", "role_field"),  # used by accessible_objects
-            ("ancestor", "descendent"),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
+        indexes = [
+            models.Index(fields=["ancestor", "content_type_id", "object_id"]),  # used by get_roles_on_resource
+            models.Index(fields=["ancestor", "content_type_id", "role_field"]),  # used by accessible_objects
+            models.Index(fields=["ancestor", "descendent"]),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
         ]

     descendent = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
@@ -82,7 +82,7 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
         related_name='%(class)ss_always',
     )
     all_parents_must_converge = models.BooleanField(
-        default=False, help_text=_("If enabled then the node will only run if all of the parent nodes " "have met the criteria to reach this node")
+        default=False, help_text=_("If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node")
     )
     unified_job_template = models.ForeignKey(
         'UnifiedJobTemplate',
@@ -181,7 +181,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
         max_length=512,
         default=uuid4,
         blank=False,
-        help_text=_('An identifier for this node that is unique within its workflow. ' 'It is copied to workflow job nodes corresponding to this node.'),
+        help_text=_('An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.'),
     )
     instance_groups = OrderedManyToManyField(
         'InstanceGroup',
@@ -334,7 +334,7 @@ class WorkflowJobNode(WorkflowNodeBase):
         accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**node_prompts_data)
         if errors:
             logger.info(
-                _('Bad launch configuration starting template {template_pk} as part of ' 'workflow {workflow_pk}. Errors:\n{error_text}').format(
+                _('Bad launch configuration starting template {template_pk} as part of workflow {workflow_pk}. Errors:\n{error_text}').format(
                     template_pk=ujt_obj.pk, workflow_pk=self.pk, error_text=errors
                 )
             )
@@ -647,7 +647,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
         null=True,
         default=None,
         on_delete=models.SET_NULL,
-        help_text=_("If automatically created for a sliced job run, the job template " "the workflow job was created from."),
+        help_text=_("If automatically created for a sliced job run, the job template the workflow job was created from."),
     )
     is_sliced_job = models.BooleanField(default=False)
     is_bulk_job = models.BooleanField(default=False)
@@ -714,7 +714,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
         wj = self.get_workflow_job()
         while wj and wj.workflow_job_template_id:
             if wj.pk in wj_ids:
-                logger.critical('Cycles detected in the workflow jobs graph, ' 'this is not normal and suggests task manager degeneracy.')
+                logger.critical('Cycles detected in the workflow jobs graph, this is not normal and suggests task manager degeneracy.')
                 break
             wj_ids.add(wj.pk)
             ancestors.append(wj.workflow_job_template)
@@ -8,7 +8,7 @@ class CustomNotificationBase(object):

     DEFAULT_APPROVAL_RUNNING_MSG = 'The approval node "{{ approval_node_name }}" needs review. This node can be viewed at: {{ workflow_url }}'
     DEFAULT_APPROVAL_RUNNING_BODY = (
-        'The approval node "{{ approval_node_name }}" needs review. ' 'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
+        'The approval node "{{ approval_node_name }}" needs review. This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
     )

     DEFAULT_APPROVAL_APPROVED_MSG = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}'

@@ -32,7 +32,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
         "success": {"body": DEFAULT_BODY},
         "error": {"body": DEFAULT_BODY},
         "workflow_approval": {
-            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. ' 'This node can be viewed at: {{ workflow_url }}"}'},
+            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
             "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
             "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
             "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
@@ -77,7 +77,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
     }
     response = post(reverse('api:credential_list'), params, admin)
     assert response.status_code == 400
-    assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, " "received organization, team, user fields.")
+    assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, received organization, team, user fields.")


 @pytest.mark.django_db
@@ -925,7 +925,7 @@ def test_credential_type_mutability(patch, organization, admin, credentialtype_s

     response = _change_credential_type()
     assert response.status_code == 400
-    expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
+    expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
     assert response.data['credential_type'] == expected

     response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@@ -962,7 +962,7 @@ def test_vault_credential_type_mutability(patch, organization, admin, credential

     response = _change_credential_type()
     assert response.status_code == 400
-    expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
+    expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
     assert response.data['credential_type'] == expected

     response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@@ -994,7 +994,7 @@ def test_cloud_credential_type_mutability(patch, organization, admin, credential

     response = _change_credential_type()
     assert response.status_code == 400
-    expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
+    expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
     assert response.data['credential_type'] == expected

     response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@@ -51,6 +51,16 @@ def test_job_relaunch_permission_denied_response(post, get, inventory, project,
     r = post(reverse('api:job_relaunch', kwargs={'pk': job.pk}), {}, jt_user, expect=201)


+@pytest.mark.django_db
+def test_label_sublist(get, admin_user, organization):
+    job = Job.objects.create()
+    label = Label.objects.create(organization=organization, name='Steve')
+    job.labels.add(label)
+    r = get(url=reverse('api:job_label_list', kwargs={'pk': job.pk}), user=admin_user, expect=200)
+    assert r.data['count'] == 1
+    assert r.data['results'].pop()['id'] == label.id
+
+
 @pytest.mark.django_db
 def test_job_relaunch_prompts_not_accepted_response(post, get, inventory, project, credential, net_credential, machine_credential):
     jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
@@ -271,6 +271,7 @@ def test_inventory_update_excessively_long_name(inventory, inventory_source):
 class TestHostManager:
     def test_host_filter_not_smart(self, setup_ec2_gce, organization):
         smart_inventory = Inventory(name='smart', organization=organization, host_filter='inventory_sources__source=ec2')
+        smart_inventory.save()
         assert len(smart_inventory.hosts.all()) == 0

     def test_host_distinctness(self, setup_inventory_groups, organization):
@@ -121,7 +121,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
|
||||
break
|
||||
alias = 'file_reference_{}'.format(i)
|
||||
else:
|
||||
raise RuntimeError('Test not able to cope with >10 references by env vars. ' 'Something probably went very wrong.')
|
||||
raise RuntimeError('Test not able to cope with >10 references by env vars. Something probably went very wrong.')
|
||||
file_aliases[abs_file_path] = alias
|
||||
for env_key in inverse_env[runner_path]:
|
||||
env[env_key] = '{{{{ {} }}}}'.format(alias)
|
||||
@@ -234,7 +234,7 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
|
||||
source_dir = os.path.join(base_dir, this_kind) # this_kind is a global
|
||||
|
||||
if not os.path.exists(source_dir):
|
||||
raise FileNotFoundError('Maybe you never made reference files? ' 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
|
||||
raise FileNotFoundError('Maybe you never made reference files? MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
|
||||
files_dir = os.path.join(source_dir, 'files')
|
||||
try:
|
||||
expected_file_list = os.listdir(files_dir)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from django.utils.timezone import utc
|
||||
from datetime import timezone
|
||||
import pytest
|
||||
|
||||
from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, InventoryUpdateEvent, SystemJobEvent
|
||||
@@ -18,7 +18,7 @@ from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, Inv
|
||||
@pytest.mark.parametrize('created', [datetime(2018, 1, 1).isoformat(), datetime(2018, 1, 1)])
|
||||
def test_event_parse_created(job_identifier, cls, created):
|
||||
event = cls.create_from_data(**{job_identifier: 123, 'created': created})
|
||||
assert event.created == datetime(2018, 1, 1).replace(tzinfo=utc)
|
||||
assert event.created == datetime(2018, 1, 1).replace(tzinfo=timezone.utc)
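
For context on the import swap in this hunk: django.utils.timezone.utc was an alias for the standard library's datetime.timezone.utc (deprecated in Django 4.1 and later removed), so the stdlib constant is a drop-in replacement. A quick sketch:

from datetime import datetime, timezone

# The stdlib constant behaves identically to the old Django alias.
aware = datetime(2018, 1, 1).replace(tzinfo=timezone.utc)
assert aware.utcoffset().total_seconds() == 0
assert aware.tzname() == 'UTC'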

@pytest.mark.parametrize(

@@ -371,7 +371,7 @@ class TestExtraVarSanitation(TestJobExecution):
# are deemed trustable, because they can only be added by users w/ enough
# privilege to add/modify a Job Template)

UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}'
UNSAFE = "{{ lookup('pipe', 'ls -la') }}"

def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me):
job.created_by = User(pk=123, username='angry-spud')

@@ -88,6 +88,6 @@ def test_global_creation_always_possible(all_views):
creatable_view = View
if not creatable or not global_view:
continue
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. ' 'Can be created now in {}'.format(
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. Can be created now in {}'.format(
model, global_view, creatable_view
)

@@ -93,7 +93,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'ansible_facts__facts__facts__blank=' 'ansible_facts__a__b__c__ space =ggg',
'ansible_facts__facts__facts__blank=ansible_facts__a__b__c__ space =ggg',
],
)
def test_invalid_filter_strings(self, mock_get_host_model, filter_string):

@@ -104,7 +104,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'created_by__password__icontains=pbkdf2' 'search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2 or search=foo',
],
)

@@ -716,7 +716,7 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
if silent_failure:
return {}
raise ParseError(
_('Cannot parse as JSON (error: {json_error}) or ' 'YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
_('Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
)
return vars_dict
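
A simplified sketch of the JSON-then-YAML fallback that produces the combined error above (illustrative only; the real parse_yaml_or_json also validates that the result is a dict and honors silent_failure):

import json
import yaml

def parse_vars(vars_str):
    # Try strict JSON first; fall back to YAML and report both errors on failure.
    try:
        return json.loads(vars_str)
    except (ValueError, TypeError) as json_err:
        try:
            return yaml.safe_load(vars_str)
        except yaml.YAMLError as yaml_err:
            raise ValueError('Cannot parse as JSON (error: {}) or YAML (error: {}).'.format(json_err, yaml_err))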

@@ -253,7 +253,7 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(hv, dict):
host.variables.update(hv)
else:
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', hk, str(type(hv)))
logger.warning('Expected dict of vars for host "%s", got %s instead', hk, str(type(hv)))
group.add_host(host)
elif isinstance(hosts, (list, tuple)):
for hk in hosts:

@@ -262,13 +262,13 @@ def dict_to_mem_data(data, inventory=None):
continue
group.add_host(host)
else:
logger.warning('Expected dict or list of "hosts" for ' 'group "%s", got %s instead', k, str(type(hosts)))
logger.warning('Expected dict or list of "hosts" for group "%s", got %s instead', k, str(type(hosts)))
# Process group variables.
vars = v.get('vars', {})
if isinstance(vars, dict):
group.variables.update(vars)
else:
logger.warning('Expected dict of vars for ' 'group "%s", got %s instead', k, str(type(vars)))
logger.warning('Expected dict of vars for group "%s", got %s instead', k, str(type(vars)))
# Process child groups.
children = v.get('children', [])
if isinstance(children, (list, tuple)):

@@ -277,7 +277,7 @@ def dict_to_mem_data(data, inventory=None):
if child and c != 'ungrouped':
group.add_child_group(child)
else:
logger.warning('Expected list of children for ' 'group "%s", got %s instead', k, str(type(children)))
logger.warning('Expected list of children for group "%s", got %s instead', k, str(type(children)))

# Load host names from a list.
elif isinstance(v, (list, tuple)):

@@ -288,7 +288,7 @@ def dict_to_mem_data(data, inventory=None):
group.add_host(host)
else:
logger.warning('Expected dict or list for group "%s", ' 'got %s instead', k, str(type(v)))
logger.warning('Expected dict or list for group "%s", got %s instead', k, str(type(v)))

if k not in ['all', 'ungrouped']:
inventory.all_group.add_child_group(group)

@@ -299,6 +299,6 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(meta_hostvars, dict):
v.variables.update(meta_hostvars)
else:
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', k, str(type(meta_hostvars)))
logger.warning('Expected dict of vars for host "%s", got %s instead', k, str(type(meta_hostvars)))

return inventory

@@ -5,13 +5,14 @@ from typing import Dict

import aiohttp
from aiohttp import client_exceptions
import aioredis

from channels.layers import get_channel_layer

from django.conf import settings
from django.apps import apps

import psycopg
import asyncpg

from awx.main.analytics.broadcast_websocket import (
RelayWebsocketStats,

@@ -180,6 +181,9 @@ class WebsocketRelayConnection:
return

continue
except aioredis.errors.ConnectionClosedError:
logger.info(f"Producer {name} lost connection to Redis, shutting down.")
return

await websocket.send_json(wrap_broadcast_msg(group, msg))
except ConnectionResetError:

@@ -205,64 +209,92 @@ class WebSocketRelayManager(object):
# hostname -> ip
self.known_hosts: Dict[str, str] = dict()

async def pg_consumer(self, conn):
async def on_heartbeet(self, conn, pid, channel, payload):
try:
await conn.execute("LISTEN web_heartbeet")
async for notif in conn.notifies():
if notif is not None and notif.channel == "web_heartbeet":
try:
payload = json.loads(notif.payload)
except json.JSONDecodeError:
logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`"
if logger.isEnabledFor(logging.DEBUG):
logmsg = "{} {}".format(logmsg, payload)
logger.warning(logmsg)
continue
if not payload or channel != "web_heartbeet":
return

# Skip if the message comes from the same host we are running on
# In this case, we'll be sharing a redis, no need to relay.
if payload.get("hostname") == self.local_hostname:
continue
try:
payload = json.loads(payload)
except json.JSONDecodeError:
logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`"
if logger.isEnabledFor(logging.DEBUG):
logmsg = "{} {}".format(logmsg, payload)
logger.warning(logmsg)
return

if payload.get("action") == "online":
hostname = payload["hostname"]
ip = payload["ip"]
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
self.known_hosts[hostname] = ip
logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.")
elif payload.get("action") == "offline":
hostname = payload["hostname"]
del self.known_hosts[hostname]
logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.")
# Skip if the message comes from the same host we are running on
# In this case, we'll be sharing a redis, no need to relay.
if payload.get("hostname") == self.local_hostname:
return

if payload.get("action") == "online":
hostname = payload.get("hostname")
ip = payload.get("ip")
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
if ip is None:
logger.warning(f"Received invalid online heartbeet, missing hostname and ip: {payload}")
return
self.known_hosts[hostname] = ip
logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.")
elif payload.get("action") == "offline":
hostname = payload.get("hostname")
ip = payload.get("ip")
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
if ip is None:
logger.warning(f"Received invalid offline heartbeet, missing hostname and ip: {payload}")
return
self.cleanup_offline_host(ip)
logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.")
except Exception as e:
# This catch-all is the same as the one above. asyncio will eat the exception
# but we want to know about it.
logger.exception(f"pg_consumer exception: {e}")
logger.exception(f"on_heartbeet exception: {e}")

def cleanup_offline_host(self, hostname):
"""
Given a hostname, try to cancel its task/connection and remove it from
the list of hosts we know about.
If the host isn't in the list, assume that it was already deleted and
don't error.
"""
if hostname in self.relay_connections:
self.relay_connections[hostname].cancel()
del self.relay_connections[hostname]

if hostname in self.known_hosts:
del self.known_hosts[hostname]

try:
self.stats_mgr.delete_remote_host_stats(hostname)
except KeyError:
pass

async def run(self):
event_loop = asyncio.get_running_loop()

stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
stats_mgr.start()
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
self.stats_mgr.start()

# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = settings.DATABASES['default']
async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
async_conn = await asyncpg.connect(
database=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
password=database_conf['PASSWORD'],
port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
# We cannot include these because asyncpg doesn't allow all the options that psycopg does.
# **database_conf.get("OPTIONS", {}),
)
await async_conn.set_autocommit(True)
event_loop.create_task(self.pg_consumer(async_conn))
await async_conn.add_listener("web_heartbeet", self.on_heartbeet)

# Establishes a websocket connection to /websocket/relay on all API servers
while True:
# logger.info("Current known hosts: {}".format(self.known_hosts))
future_remote_hosts = self.known_hosts.keys()
current_remote_hosts = self.relay_connections.keys()
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)

@@ -291,13 +323,10 @@ class WebSocketRelayManager(object):
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")

for h in deleted_remote_hosts:
self.relay_connections[h].cancel()
del self.relay_connections[h]
del self.known_hosts[h]
stats_mgr.delete_remote_host_stats(h)
self.cleanup_offline_host(h)

for h in new_remote_hosts:
stats = stats_mgr.new_remote_host_stats(h)
stats = self.stats_mgr.new_remote_host_stats(h)
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
relay_connection.start()
self.relay_connections[h] = relay_connection
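
The database change in this file swaps a hand-rolled LISTEN loop for asyncpg's callback-based listener API: add_listener() registers a callback that asyncpg invokes with (connection, pid, channel, payload) for every NOTIFY on the channel. A standalone sketch of that API, with illustrative connection details rather than the AWX settings:

import asyncio
import asyncpg

async def main():
    conn = await asyncpg.connect(database='awx', user='awx', host='localhost')

    def on_notify(conn, pid, channel, payload):
        # Called by asyncpg for every NOTIFY on the subscribed channel.
        print(f'{channel}: {payload}')

    await conn.add_listener('web_heartbeet', on_notify)
    await asyncio.sleep(3600)  # keep the connection open while listening

asyncio.run(main())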

@@ -58,7 +58,7 @@ class ActionModule(ActionBase):

if res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
url, res.status_code, res.content
)
return result

@@ -87,7 +87,7 @@ class ActionModule(ActionBase):
continue
elif res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
playbook_url, res.status_code, res.content
)
return result

@@ -395,6 +395,7 @@ AUTHENTICATION_BACKENDS = (
OAUTH2_PROVIDER_APPLICATION_MODEL = 'main.OAuth2Application'
OAUTH2_PROVIDER_ACCESS_TOKEN_MODEL = 'main.OAuth2AccessToken'
OAUTH2_PROVIDER_REFRESH_TOKEN_MODEL = 'oauth2_provider.RefreshToken'
OAUTH2_PROVIDER_ID_TOKEN_MODEL = "oauth2_provider.IDToken"

OAUTH2_PROVIDER = {'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000000, 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600, 'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000}
ALLOW_OAUTH2_FOR_EXTERNAL_USERS = False
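
For scale, the token lifetimes registered above work out to roughly a millennium for access tokens and about a month for refresh tokens:

ACCESS_TOKEN_EXPIRE_SECONDS = 31536000000
REFRESH_TOKEN_EXPIRE_SECONDS = 2628000
SECONDS_PER_YEAR = 365 * 24 * 3600  # 31,536,000
print(ACCESS_TOKEN_EXPIRE_SECONDS / SECONDS_PER_YEAR)  # 1000.0 years
print(REFRESH_TOKEN_EXPIRE_SECONDS / (24 * 3600))      # ~30.4 days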

@@ -269,7 +269,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
value = value[0]
if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
logger.warning(
"Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
"Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
conf_key[5:],
key,
self.name,

@@ -100,7 +100,7 @@ register(
'AUTHENTICATION_BACKENDS',
field_class=AuthenticationBackendsField,
label=_('Authentication Backends'),
help_text=_('List of authentication backends that are enabled based on ' 'license features and other authentication settings.'),
help_text=_('List of authentication backends that are enabled based on license features and other authentication settings.'),
read_only=True,
depends_on=AuthenticationBackendsField.get_all_required_settings(),
category=_('Authentication'),

@@ -360,7 +360,7 @@ def _register_ldap(append=None):
default=None,
label=_('LDAP Deny Group'),
help_text=_(
'Group DN denied from login. If specified, user will not be ' 'allowed to login if a member of this group. Only one deny group ' 'is supported.'
'Group DN denied from login. If specified, user will not be allowed to login if a member of this group. Only one deny group is supported.'
),
category=_('LDAP'),
category_slug='ldap',

@@ -426,7 +426,7 @@ def _register_ldap(append=None):
field_class=LDAPTeamMapField,
default={},
label=_('LDAP Team Map'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration' ' details are available in the documentation.'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration details are available in the documentation.'),
category=_('LDAP'),
category_slug='ldap',
placeholder=collections.OrderedDict(

@@ -461,7 +461,7 @@ register(
allow_blank=True,
default='',
label=_('RADIUS Server'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is ' 'disabled if this setting is empty.'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is disabled if this setting is empty.'),
category=_('RADIUS'),
category_slug='radius',
placeholder='radius.example.com',

@@ -564,9 +564,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('google-oauth2'),
label=_('Google OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
depends_on=['TOWER_URL_BASE'],

@@ -602,7 +600,7 @@ register(
field_class=fields.StringListField,
default=[],
label=_('Google OAuth2 Allowed Domains'),
help_text=_('Update this setting to restrict the domains who are allowed to ' 'login using Google OAuth2.'),
help_text=_('Update this setting to restrict the domains who are allowed to login using Google OAuth2.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
placeholder=['example.com'],

@@ -658,9 +656,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github'),
label=_('GitHub OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub OAuth2'),
category_slug='github',
depends_on=['TOWER_URL_BASE'],

@@ -723,9 +719,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-org'),
label=_('GitHub Organization OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
depends_on=['TOWER_URL_BASE'],

@@ -760,7 +754,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Organization Name'),
help_text=_('The name of your GitHub organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
)

@@ -839,7 +833,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Team ID'),
help_text=_('Find the numeric team ID using the Github API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Team OAuth2'),
category_slug='github-team',
)

@@ -878,9 +872,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise'),
label=_('GitHub Enterprise OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
depends_on=['TOWER_URL_BASE'],

@@ -892,7 +884,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
)

@@ -904,7 +896,7 @@ register(
default='',
label=_('GitHub Enterprise API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',

@@ -967,9 +959,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise-org'),
label=_('GitHub Enterprise Organization OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
depends_on=['TOWER_URL_BASE'],

@@ -981,7 +971,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
)

@@ -993,7 +983,7 @@ register(
default='',
label=_('GitHub Enterprise Organization API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',

@@ -1028,7 +1018,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization Name'),
help_text=_('The name of your GitHub Enterprise organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub Enterprise organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
)

@@ -1084,7 +1074,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
)

@@ -1096,7 +1086,7 @@ register(
default='',
label=_('GitHub Enterprise Team API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',

@@ -1131,7 +1121,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team ID'),
help_text=_('Find the numeric team ID using the Github Enterprise API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github Enterprise API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Enterprise Team OAuth2'),
category_slug='github-enterprise-team',
)

@@ -1170,9 +1160,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('azuread-oauth2'),
label=_('Azure AD OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part' ' of your registration process. Refer to the' ' documentation for more detail. '
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail. '),
category=_('Azure AD OAuth2'),
category_slug='azuread-oauth2',
depends_on=['TOWER_URL_BASE'],

@@ -1291,7 +1279,7 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Automatically Create Organizations and Teams on SAML Login'),
help_text=_('When enabled (the default), mapped Organizations and Teams ' 'will be created automatically on successful SAML login.'),
help_text=_('When enabled (the default), mapped Organizations and Teams will be created automatically on successful SAML login.'),
category=_('SAML'),
category_slug='saml',
)

@@ -1318,7 +1306,7 @@ register(
read_only=True,
default=get_saml_metadata_url,
label=_('SAML Service Provider Metadata URL'),
help_text=_('If your identity provider (IdP) allows uploading an XML ' 'metadata file, you can download one from this URL.'),
help_text=_('If your identity provider (IdP) allows uploading an XML metadata file, you can download one from this URL.'),
category=_('SAML'),
category_slug='saml',
)

@@ -1346,7 +1334,7 @@ register(
required=True,
validators=[validate_certificate],
label=_('SAML Service Provider Public Certificate'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the certificate content here.'),
help_text=_('Create a keypair to use as a service provider (SP) and include the certificate content here.'),
category=_('SAML'),
category_slug='saml',
)

@@ -1358,7 +1346,7 @@ register(
required=True,
validators=[validate_private_key],
label=_('SAML Service Provider Private Key'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the private key content here.'),
help_text=_('Create a keypair to use as a service provider (SP) and include the private key content here.'),
category=_('SAML'),
category_slug='saml',
encrypted=True,

@@ -1369,7 +1357,7 @@ register(
field_class=SAMLOrgInfoField,
required=True,
label=_('SAML Service Provider Organization Info'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to' ' the documentation for example syntax.'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(

@@ -1383,7 +1371,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Technical Contact'),
help_text=_('Provide the name and email address of the technical contact for' ' your service provider. Refer to the documentation' ' for example syntax.'),
help_text=_('Provide the name and email address of the technical contact for your service provider. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Technical Contact'), ('emailAddress', 'techsup@example.com')]),

@@ -1395,7 +1383,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Support Contact'),
help_text=_('Provide the name and email address of the support contact for your' ' service provider. Refer to the documentation for' ' example syntax.'),
help_text=_('Provide the name and email address of the support contact for your service provider. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Support Contact'), ('emailAddress', 'support@example.com')]),

@@ -1457,9 +1445,7 @@ register(
allow_null=True,
default={'requestedAuthnContext': False},
label=_('SAML Security Config'),
help_text=_(
'A dict of key value pairs that are passed to the underlying' ' python-saml security setting' ' https://github.com/onelogin/python-saml#settings'
),
help_text=_('A dict of key value pairs that are passed to the underlying python-saml security setting https://github.com/onelogin/python-saml#settings'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(

@@ -1491,7 +1477,7 @@ register(
allow_null=True,
default=None,
label=_('SAML Service Provider extra configuration data'),
help_text=_('A dict of key value pairs to be passed to the underlying' ' python-saml Service Provider configuration setting.'),
help_text=_('A dict of key value pairs to be passed to the underlying python-saml Service Provider configuration setting.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(),

@@ -390,7 +390,7 @@ class LDAPSearchUnionField(fields.ListField):
search_args = []
for i in range(len(data)):
if not isinstance(data[i], list):
raise ValidationError('In order to ultilize LDAP Union, input element No. %d' ' should be a search query array.' % (i + 1))
raise ValidationError('In order to ultilize LDAP Union, input element No. %d should be a search query array.' % (i + 1))
try:
search_args.append(self.ldap_search_field_class().run_validation(data[i]))
except Exception as e:

@@ -56,7 +56,7 @@ register(
field_class=fields.IntegerField,
min_value=100,
label=_('Max Job Events Retrieved by UI'),
help_text=_('Maximum number of job events for the UI to retrieve within a ' 'single request.'),
help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
category=_('UI'),
category_slug='ui',
)

@@ -65,7 +65,7 @@ register(
'UI_LIVE_UPDATES_ENABLED',
field_class=fields.BooleanField,
label=_('Enable Live Updates in the UI'),
help_text=_('If disabled, the page will not refresh when events are received. ' 'Reloading the page will be required to get the latest details.'),
help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
category=_('UI'),
category_slug='ui',
)

@@ -264,7 +264,6 @@ options:
description:
- Maximum time in seconds to wait for a job to finish (server-side).
type: int
default: 0
job_slice_count:
description:
- The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.
@@ -9,168 +9,156 @@
jt_name: "AWX-Collection-tests-job_wait-long_running-{{ test_id }}"
proj_name: "AWX-Collection-tests-job_wait-long_running-{{ test_id }}"

- name: Assure that the demo project exists
project:
name: "{{ proj_name }}"
scm_type: 'git'
scm_url: 'https://github.com/ansible/test-playbooks.git'
scm_update_on_launch: true
organization: Default
- block:
- name: Create a project
project:
name: "{{ proj_name }}"
scm_type: 'git'
scm_url: 'https://github.com/ansible/test-playbooks.git'
scm_update_on_launch: true
organization: Default

- name: Create a job template
job_template:
name: "{{ jt_name }}"
playbook: "sleep.yml"
job_type: run
project: "{{ proj_name }}"
inventory: "Demo Inventory"
extra_vars:
sleep_interval: 300
- name: Create a job template
job_template:
name: "{{ jt_name }}"
playbook: "sleep.yml"
job_type: run
project: "{{ proj_name }}"
inventory: "Demo Inventory"
extra_vars:
sleep_interval: 600

- name: Validate that interval superceeds min/max
job_wait:
min_interval: 10
max_interval: 20
interval: 12
job_id: "99999999"
register: result
ignore_errors: true
- name: Check module fails with correct msg
job_wait:
job_id: "99999999"
register: result
ignore_errors: true

- assert:
that:
- "result.msg =='Unable to wait on job 99999999; that ID does not exist.' or
'min and max interval have been depricated, please use interval instead, interval will be set to 12'"
- assert:
that:
- result is failed
- "result.msg =='Unable to wait, no job_id 99999999 found: The requested object could not be found.' or
'Unable to wait on job 99999999; that ID does not exist.'"

- name: Check module fails with correct msg
job_wait:
job_id: "99999999"
register: result
ignore_errors: true
- name: Launch Demo Job Template (take happy path)
job_launch:
job_template: "Demo Job Template"
register: job

- assert:
that:
- result is failed
- "result.msg =='Unable to wait, no job_id 99999999 found: The requested object could not be found.' or
'Unable to wait on job 99999999; that ID does not exist.'"
- assert:
that:
- job is changed

- name: Launch Demo Job Template (take happy path)
job_launch:
job_template: "Demo Job Template"
register: job
- name: Wait for the Job to finish
job_wait:
job_id: "{{ job.id }}"
register: wait_results

- assert:
that:
- job is changed
# Make sure it worked and that we have some data in our results
- assert:
that:
- wait_results is successful
- "'elapsed' in wait_results"
- "'id' in wait_results"

- name: Wait for the Job to finish
job_wait:
job_id: "{{ job.id }}"
register: wait_results
- name: Launch a long running job
job_launch:
job_template: "{{ jt_name }}"
register: job

# Make sure it worked and that we have some data in our results
- assert:
that:
- wait_results is successful
- "'elapsed' in wait_results"
- "'id' in wait_results"
- assert:
that:
- job is changed

- name: Launch a long running job
job_launch:
job_template: "{{ jt_name }}"
register: job
- name: Timeout waiting for the job to complete
job_wait:
job_id: "{{ job.id }}"
timeout: 5
ignore_errors: true
register: wait_results

- assert:
that:
- job is changed
# Make sure that we failed and that we have some data in our results
- assert:
that:
- "wait_results.msg == 'Monitoring aborted due to timeout' or 'Timeout waiting for job to finish.'"
- "'id' in wait_results"

- name: Timeout waiting for the job to complete
job_wait:
job_id: "{{ job.id }}"
timeout: 5
ignore_errors: true
register: wait_results
- name: Async cancel the long running job
job_cancel:
job_id: "{{ job.id }}"
async: 3600
poll: 0

# Make sure that we failed and that we have some data in our results
- assert:
that:
- "wait_results.msg == 'Monitoring aborted due to timeout' or 'Timeout waiting for job to finish.'"
- "'id' in wait_results"
- name: Wait for the job to exit on cancel
job_wait:
job_id: "{{ job.id }}"
register: wait_results
ignore_errors: true

- name: Async cancel the long running job
job_cancel:
job_id: "{{ job.id }}"
async: 3600
poll: 0
- assert:
that:
- wait_results is failed
- 'wait_results.status == "canceled"'
- "wait_results.msg == 'Job with id {{ job.id }} failed' or 'Job with id={{ job.id }} failed, error: Job failed.'"

- name: Wait for the job to exit on cancel
job_wait:
job_id: "{{ job.id }}"
register: wait_results
ignore_errors: true
# workflow wait test
- name: Generate a random string for test
set_fact:
test_id1: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id1 is not defined

- assert:
that:
- wait_results is failed
- 'wait_results.status == "canceled"'
- "wait_results.msg == 'Job with id {{ job.id }} failed' or 'Job with id={{ job.id }} failed, error: Job failed.'"
- name: Generate names
set_fact:
wfjt_name2: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id1 }}"

- name: Delete the job template
job_template:
name: "{{ jt_name }}"
playbook: "sleep.yml"
job_type: run
project: "{{ proj_name }}"
inventory: "Demo Inventory"
state: absent
- name: Create our workflow
workflow_job_template:
name: "{{ wfjt_name2 }}"
state: present

- name: Delete the project
project:
name: "{{ proj_name }}"
organization: Default
state: absent
- name: Add a node
workflow_job_template_node:
workflow_job_template: "{{ wfjt_name2 }}"
unified_job_template: "Demo Job Template"
identifier: leaf
register: new_node

# workflow wait test
- name: Generate a random string for test
set_fact:
test_id1: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id1 is not defined
- name: Kick off a workflow
workflow_launch:
workflow_template: "{{ wfjt_name2 }}"
ignore_errors: true
register: workflow

- name: Generate names
set_fact:
wfjt_name2: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id1 }}"
- name: Wait for the Workflow Job to finish
job_wait:
job_id: "{{ workflow.job_info.id }}"
job_type: "workflow_jobs"
register: wait_workflow_results

- name: Create our workflow
workflow_job_template:
name: "{{ wfjt_name2 }}"
state: present
# Make sure it worked and that we have some data in our results
- assert:
that:
- wait_workflow_results is successful
- "'elapsed' in wait_workflow_results"
- "'id' in wait_workflow_results"

- name: Add a node
workflow_job_template_node:
workflow_job_template: "{{ wfjt_name2 }}"
unified_job_template: "Demo Job Template"
identifier: leaf
register: new_node
always:
- name: Clean up test workflow
workflow_job_template:
name: "{{ wfjt_name2 }}"
state: absent

- name: Kick off a workflow
workflow_launch:
workflow_template: "{{ wfjt_name2 }}"
ignore_errors: true
register: workflow
- name: Delete the job template
job_template:
name: "{{ jt_name }}"
playbook: "sleep.yml"
job_type: run
project: "{{ proj_name }}"
inventory: "Demo Inventory"
state: absent

- name: Wait for the Workflow Job to finish
job_wait:
job_id: "{{ workflow.job_info.id }}"
job_type: "workflow_jobs"
register: wait_workflow_results

# Make sure it worked and that we have some data in our results
- assert:
that:
- wait_workflow_results is successful
- "'elapsed' in wait_workflow_results"
- "'id' in wait_workflow_results"

- name: Clean up test workflow
workflow_job_template:
name: "{{ wfjt_name2 }}"
state: absent
- name: Delete the project
project:
name: "{{ proj_name }}"
organization: Default
state: absent
@@ -1 +1,3 @@
plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
plugins/modules/import.py pylint:unused-import # Simply used as a feature flag conditional
test/awx/conftest.py pylint:unused-import # Used to make sure we are importing the right awxkit, see comment in conftest.py near imports

3
awx_collection/tests/sanity/ignore-2.16.txt
Normal file
@@ -0,0 +1,3 @@
plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
plugins/modules/import.py pylint:unused-import # Simply used as a feature flag conditional
test/awx/conftest.py pylint:unused-import # Used to make sure we are importing the right awxkit, see comment in conftest.py near imports
@@ -253,7 +253,13 @@ class ApiV2(base.Base):
# Import methods

def _dependent_resources(self):
page_resource = {getattr(self, resource)._create().__item_class__: resource for resource in self.json}
page_resource = {}
for resource in self.json:
# The /api/v2/constructed_inventories endpoint is for the UI but will register as an Inventory endpoint
# We want to map the type to /api/v2/inventories/ which works for constructed too
if resource == 'constructed_inventory':
continue
page_resource[getattr(self, resource)._create().__item_class__] = resource
data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES]

for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)):

@@ -282,7 +288,18 @@ class ApiV2(base.Base):
if asset['natural_key']['type'] == 'user':
# We should only impose a default password if the resource doesn't exist.
post_data.setdefault('password', 'abc123')
_page = endpoint.post(post_data)
try:
_page = endpoint.post(post_data)
except exc.NoContent:
# desired exception under some circumstances, e.g. labels that already exist
if _page is None and 'name' in post_data:
results = endpoint.get(all_pages=True).results
for item in results:
if item['name'] == post_data['name']:
_page = item.get()
break
else:
raise
changed = True
if asset['natural_key']['type'] == 'project':
# When creating a project, we need to wait for its

@@ -302,8 +319,6 @@ class ApiV2(base.Base):

_page = _page.put(post_data)
changed = True
except exc.NoContent: # desired exception under some circumstances, e.g. labels that already exist
pass
except (exc.Common, AssertionError) as e:
identifier = asset.get("name", None) or asset.get("username", None) or asset.get("hostname", None)
log.error(f'{endpoint} "{identifier}": {e}.')
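
The new try/except above makes the import idempotent: awxkit raises exc.NoContent when the API answers 204 (for example, POSTing a label that already exists), and the handler then resolves the existing object by name instead of failing. A condensed, self-contained sketch of the pattern (endpoint stands in for any awxkit page object; this is not the exact method, which also consults a natural-key cache first):

from awxkit import exceptions as exc

def post_or_lookup(endpoint, post_data):
    """POST an asset; on 204 No Content, resolve the existing object by name."""
    try:
        return endpoint.post(post_data)
    except exc.NoContent:
        # 204 means the object already exists; find it by name instead.
        for item in endpoint.get(all_pages=True).results:
            if item.get('name') == post_data.get('name'):
                return item.get()
        raise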

@@ -404,6 +419,7 @@ class ApiV2(base.Base):

for resource in self._dependent_resources():
endpoint = getattr(self, resource)

# Load up existing objects, so that we can try to update or link to them
self._cache.get_page(endpoint)
imported = self._import_list(endpoint, data.get(resource) or [])

@@ -13,7 +13,7 @@ class SystemJobTemplate(UnifiedJobTemplate, HasNotifications):

# return job
jobs_pg = self.get_related('jobs', id=result.json['system_job'])
assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching " "job at %s/jobs/" % (result.json['job'], self.url)
assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching job at %s/jobs/" % (result.json['job'], self.url)
return jobs_pg.results[0]

@@ -40,7 +40,7 @@ class UnifiedJob(HasStatus, base.Base):
Default behavior is to replace newline characters with a space, but this can be modified, including replacement
with ''. Pass replace_newlines=None to disable.

Additionally, you may replace any ' ' with another character (including ''). This is applied after the newline
Additionally, you may replace any with another character (including ''). This is applied after the newline
replacement. Default behavior is to not replace spaces.
"""
self.wait_until_completed()

@@ -147,7 +147,7 @@ class UnifiedJob(HasStatus, base.Base):
if host_loc.startswith(expected_prefix):
return host_loc
raise RuntimeError(
'Could not find a controller private_data_dir for this job. ' 'Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
'Could not find a controller private_data_dir for this job. Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
)

@@ -139,7 +139,7 @@ def format_jq(output, fmt):
if fmt == '.':
return output
raise ImportError(
'To use `-f jq`, you must install the optional jq dependency.\n' '`pip install jq`\n',
'To use `-f jq`, you must install the optional jq dependency.\n`pip install jq`\n',
'Note that some platforms may require additional programs to '
'build jq from source (like `libtool`).\n'
'See https://pypi.org/project/jq/ for instructions.',

@@ -55,7 +55,7 @@ def pk_or_name(v2, model_name, value, page=None):
return int(results.results[0].id)
if results.count > 1:
raise argparse.ArgumentTypeError(
'Multiple {0} exist with that {1}. ' 'To look up an ID, run:\n' 'awx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
'Multiple {0} exist with that {1}. To look up an ID, run:\nawx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
)
raise argparse.ArgumentTypeError('Could not find any {0} with that {1}.'.format(model_name, identity))

@@ -119,7 +119,7 @@ class ResourceOptionsParser(object):
'--all',
dest='all_pages',
action='store_true',
help=('fetch all pages of content from the API when ' 'returning results (instead of just the first page)'),
help=('fetch all pages of content from the API when returning results (instead of just the first page)'),
)
parser.add_argument(
'--order_by',

@@ -22,7 +22,7 @@ class CustomAutoprogramDirective(AutoprogramDirective):
nodes[0][0].children = [heading]

# add a descriptive top synopsis of the reference guide
nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in ' 'the awx CLI tool.')))
nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in the awx CLI tool.')))
disclaimer = (
'The commands and parameters documented here can (and will) '
'vary based on a variety of factors, such as the AWX API '

@@ -33,7 +33,7 @@ def parse_args():
'--project-file',
dest='project_file',
default=os.getenv('AWXKIT_PROJECT_FILE'),
help='Path for yml project config file.' 'If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
help='Path for yml project config file.If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
)
parser.add_argument('-f', '--file', dest='akit_script', default=False, help='akit script file to run in interactive session.')
parser.add_argument('-x', '--non-interactive', action='store_true', dest='non_interactive', help='Do not run in interactive mode.')

204
licenses/asyncpg.txt
Normal file
@@ -0,0 +1,204 @@
Copyright (C) 2016-present the asyncpg authors and contributors.

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright (C) 2016-present the asyncpg authors and contributors
|
||||
<see AUTHORS file>
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
9
licenses/deprecated.txt
Normal file
@@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright (c) 2017 Laurent LAPORTE

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
BIN
licenses/jwcrypto-1.4.2.tar.gz
Normal file
Binary file not shown.
@@ -1,7 +1,7 @@
                   GNU LESSER GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

@@ -162,4 +162,4 @@ General Public License ever published by the Free Software Foundation.
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
Library.
Binary file not shown.
24
licenses/wrapt.txt
Normal file
@@ -0,0 +1,24 @@
Copyright (c) 2013-2023, Graham Dumpleton
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
@@ -2,27 +2,28 @@ aiohttp
ansiconv==1.0.0 # UPGRADE BLOCKER: from 2013, consider replacing instead of upgrading
asciichartpy
asn1
asyncpg
azure-keyvault==1.1.0 # see UPGRADE BLOCKERs
channels
channels-redis==3.4.1 # see UPGRADE BLOCKERs
cryptography
cryptography>=39.0.1 ## https://github.com/ansible/awx/security/dependabot/90
Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep
daphne
distro
django==3.2.16 # see UPGRADE BLOCKERs https://github.com/ansible/awx/security/dependabot/67
django==4.2 # see UPGRADE BLOCKERs
django-auth-ldap
django-cors-headers
django-crum
django-extensions
django-guid==3.2.1
django-oauth-toolkit==1.4.1
django-oauth-toolkit<2.0.0 # Version 2.0.0 has breaking changes that will need to be worked out before upgrading
django-polymorphic
django-pglocks
django-redis
django-solo
django-split-settings==1.0.0 # We hit a strange issue where the release process errored when upgrading past 1.0.0 see UPGRADE BLOCKERS
django-taggit
djangorestframework==3.13.1
djangorestframework
djangorestframework-yaml
filelock
GitPython>=3.1.30 # CVE-2022-24439
@@ -36,7 +37,6 @@ openshift
pexpect==4.7.0 # see library notes
prometheus_client
psycopg2
psycopg # psycopg3 is used to listen for pg_notify messages from web servers in awx.main.wsrelay where asyncio is used
psutil
pygerduty
pyparsing==2.4.6 # Upgrading to v3 of pyparsing introduce errors on smart host filtering: Expected 'or' term, found 'or' (at char 15), (line:1, col:16)
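The new `psycopg` entry above notes that psycopg 3's asyncio support is what `awx.main.wsrelay` uses to listen for pg_notify messages. As a minimal, hedged sketch of that pattern only (not AWX's actual implementation; the DSN and channel name below are placeholders):

    import asyncio

    import psycopg  # psycopg 3 ("psycopg" on PyPI), distinct from psycopg2

    async def relay(dsn: str) -> None:
        # autocommit so LISTEN takes effect immediately, outside any transaction
        conn = await psycopg.AsyncConnection.connect(dsn, autocommit=True)
        async with conn:
            # "awx_ws_relay" is a placeholder channel name, not AWX's real one
            await conn.execute("LISTEN awx_ws_relay")
            # notifies() yields each pg_notify as it arrives, without polling
            async for note in conn.notifies():
                print(note.channel, note.payload)

    asyncio.run(relay("dbname=awx"))

The async generator blocks the coroutine (not the event loop) between notifications, which is why asyncio and psycopg 3, rather than psycopg2, fit this use case.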
@@ -11,7 +11,7 @@ ansiconv==1.0.0
    # via -r /awx_devel/requirements/requirements.in
asciichartpy==1.5.25
    # via -r /awx_devel/requirements/requirements.in
asgiref==3.5.2
asgiref==3.6.0
    # via
    #   channels
    #   channels-redis
@@ -24,6 +24,8 @@ async-timeout==4.0.2
    #   aiohttp
    #   aioredis
    #   redis
asyncpg==0.27.0
    # via -r /awx_devel/requirements/requirements.in
attrs==22.1.0
    # via
    #   aiohttp
@@ -68,12 +70,13 @@ click==8.1.3
    # via receptorctl
constantly==15.1.0
    # via twisted
cryptography==38.0.4
cryptography==40.0.2
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   adal
    #   autobahn
    #   azure-keyvault
    #   jwcrypto
    #   pyopenssl
    #   service-identity
    #   social-auth-core
@@ -91,9 +94,11 @@ defusedxml==0.7.1
    # via
    #   python3-openid
    #   social-auth-core
deprecated==1.2.13
    # via jwcrypto
distro==1.8.0
    # via -r /awx_devel/requirements/requirements.in
django==3.2.16
django==4.2
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   channels
@@ -118,7 +123,7 @@ django-extensions==3.2.1
    # via -r /awx_devel/requirements/requirements.in
django-guid==3.2.1
    # via -r /awx_devel/requirements/requirements.in
django-oauth-toolkit==1.4.1
django-oauth-toolkit==1.7.1
    # via -r /awx_devel/requirements/requirements.in
django-pglocks==1.0.4
    # via -r /awx_devel/requirements/requirements.in
@@ -133,7 +138,7 @@ django-split-settings==1.0.0
    # via -r /awx_devel/requirements/requirements.in
django-taggit==3.1.0
    # via -r /awx_devel/requirements/requirements.in
djangorestframework==3.13.1
djangorestframework==3.14.0
    # via -r /awx_devel/requirements/requirements.in
djangorestframework-yaml==2.0.0
    # via -r /awx_devel/requirements/requirements.in
@@ -210,6 +215,8 @@ json-log-formatter==0.5.1
    # via -r /awx_devel/requirements/requirements.in
jsonschema==4.17.3
    # via -r /awx_devel/requirements/requirements.in
jwcrypto==1.4.2
    # via django-oauth-toolkit
kubernetes==25.3.0
    # via openshift
lockfile==0.12.2
@@ -266,8 +273,6 @@ prometheus-client==0.15.0
    # via -r /awx_devel/requirements/requirements.in
psutil==5.9.4
    # via -r /awx_devel/requirements/requirements.in
psycopg==3.1.4
    # via -r /awx_devel/requirements/requirements.in
psycopg2==2.9.5
    # via -r /awx_devel/requirements/requirements.in
ptyprocess==0.7.0
@@ -295,7 +300,7 @@ pyjwt==2.6.0
    #   adal
    #   social-auth-core
    #   twilio
pyopenssl==22.1.0
pyopenssl==23.1.1
    # via twisted
pyparsing==2.4.6
    # via
@@ -329,7 +334,6 @@ python3-openid==3.2.0
    # via -r /awx_devel/requirements/requirements_git.txt
pytz==2022.6
    # via
    #   django
    #   djangorestframework
    #   irc
    #   tempora
@@ -428,7 +432,7 @@ txaio==22.2.1
typing-extensions==4.4.0
    # via
    #   azure-core
    #   psycopg
    #   pydantic
    #   setuptools-rust
    #   setuptools-scm
    #   twisted
@@ -444,6 +448,8 @@ websocket-client==1.4.2
    # via kubernetes
wheel==0.38.4
    # via -r /awx_devel/requirements/requirements.in
wrapt==1.15.0
    # via deprecated
xmlsec==1.3.13
    # via python3-saml
yarl==1.8.1
@@ -1,7 +1,6 @@
import os
import sys
import subprocess
import traceback

try:
    from setuptools_scm import get_version
@@ -9,8 +8,13 @@ except ModuleNotFoundError:
    sys.stderr.write("Unable to import setuptools-scm, attempting to install now...\n")

    os.environ['PIP_DISABLE_PIP_VERSION_CHECK'] = '1'
    subprocess.check_output([sys.executable, '-m', 'ensurepip'])
    subprocess.check_output([sys.executable, '-m', 'pip', 'install', 'setuptools-scm'])
    COMMANDS = ([sys.executable, '-m', 'ensurepip'], [sys.executable, '-m', 'pip', 'install', 'setuptools-scm'])
    for cmd in COMMANDS:
        # capture_output because we only want to print version to stdout if successful
        result = subprocess.run(cmd, capture_output=True)
        if result.returncode:
            # failed, we have no version, so print output so that users can debug
            raise Exception(f'\nCommand `{" ".join(cmd)}` failed (rc={result.returncode}).\n\nstdout:\n{result.stdout}\n\nstderr:\n{result.stderr}')

    from setuptools_scm import get_version
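The hunk above ends before the script's tail, which presumably prints the version computed by setuptools-scm once the import succeeds. A minimal, assumed sketch of that final step (the `root` and `relative_to` arguments are guesses for illustration, not necessarily this repo's actual values):

    # hypothetical tail: emit the SCM-derived version on stdout
    print(get_version(root='..', relative_to=__file__))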