Compare commits

..

9 Commits

Author SHA1 Message Date
Peter Braun
8fe1bfd993 Merge branch 'devel' into AAP-60861 2026-04-01 14:09:55 +02:00
melissalkelly
e80ce43f87 Fix workload identity project updates (#16373)
* fix: enable workload identity credentials for project updates

* Add explanatory comment for credential context handling

* Revert build_passwords
2026-03-31 14:48:27 +02:00
Stevenson Michel
595e093bbf [CI-Fix] Pin setuptools_scm<10 to fix api-lint failure (#16376)
Fix CI: Pin setuptools_scm<10 to fix api-lint build failure

setuptools-scm 10.0.5 (with its new vcs-versioning dependency) requires
a [tool.setuptools_scm] or [tool.vcs-versioning] section in pyproject.toml.
AWX intentionally omits this section because it uses a custom version
resolution via setup.cfg (version = attr: awx.get_version). The new major
version of setuptools-scm treats the missing section as a fatal error when
building the sdist in tox's isolated build, causing the linters environment
to fail.

Pinning to <10 restores compatibility with the existing version resolution
strategy.

Failing run: https://github.com/ansible/awx/actions/runs/23744310714
Branch: devel

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-30 14:37:16 -04:00
Peter Braun
4f231aea44 move fix into get_job_kwargs 2026-03-30 12:46:23 +02:00
Peter Braun
0a80c91a96 fix: empty string vs nil handling for limit parameter 2026-03-30 12:46:22 +02:00
TVo
cd7f6f602f Fix OpenAPI schema validation message mismatch (#16372) 2026-03-25 12:36:10 -06:00
Chris Meyers
310dd3e18f Update dispatcherd to version 2026.3.25 (#16369)
Co-authored-by: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
2026-03-25 10:57:01 -04:00
Matthew Sandoval
7c75788b0a AAP-67740 Pass plugin_description through to CredentialType.description (#16364)
* Pass plugin_description through to CredentialType.description

Propagate the plugin_description field from credential plugins into the
CredentialType description when loading and creating managed credential
types, including updates to existing records.

Assisted-by: Claude

* Add unit tests for plugin_description passthrough to CredentialType

Tests cover load_plugin, get_creation_params, and
_setup_tower_managed_defaults handling of the description field.

Assisted-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: PabloHiro <palonso@redhat.com>
2026-03-25 11:03:11 +01:00
Peter Braun
ab294385ad fix: avoid delete in loop in inventory import (#16366) 2026-03-24 15:37:59 +00:00
11 changed files with 158 additions and 35 deletions

View File

@@ -581,7 +581,7 @@ detect-schema-change: genschema
validate-openapi-schema: genschema validate-openapi-schema: genschema
@echo "Validating OpenAPI schema from schema.json..." @echo "Validating OpenAPI schema from schema.json..."
@python3 -c "from openapi_spec_validator import validate; import json; spec = json.load(open('schema.json')); validate(spec); print('✓ OpenAPI Schema is valid!')" @python3 -c "from openapi_spec_validator import validate; import json; spec = json.load(open('schema.json')); validate(spec); print('✓ Schema is valid')"
docker-compose-clean: awx/projects docker-compose-clean: awx/projects
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf

View File

@@ -409,10 +409,12 @@ class Command(BaseCommand):
del_child_group_pks = list(set(db_children_name_pk_map.values())) del_child_group_pks = list(set(db_children_name_pk_map.values()))
for offset in range(0, len(del_child_group_pks), self._batch_size): for offset in range(0, len(del_child_group_pks), self._batch_size):
child_group_pks = del_child_group_pks[offset : (offset + self._batch_size)] child_group_pks = del_child_group_pks[offset : (offset + self._batch_size)]
for db_child in db_children.filter(pk__in=child_group_pks): children_to_remove = list(db_children.filter(pk__in=child_group_pks))
group_group_count += 1 if children_to_remove:
db_group.children.remove(db_child) group_group_count += len(children_to_remove)
logger.debug('Group "%s" removed from group "%s"', db_child.name, db_group.name) db_group.children.remove(*children_to_remove)
for db_child in children_to_remove:
logger.debug('Group "%s" removed from group "%s"', db_child.name, db_group.name)
# FIXME: Inventory source group relationships # FIXME: Inventory source group relationships
# Delete group/host relationships not present in imported data. # Delete group/host relationships not present in imported data.
db_hosts = db_group.hosts db_hosts = db_group.hosts
@@ -441,12 +443,12 @@ class Command(BaseCommand):
del_host_pks = list(del_host_pks) del_host_pks = list(del_host_pks)
for offset in range(0, len(del_host_pks), self._batch_size): for offset in range(0, len(del_host_pks), self._batch_size):
del_pks = del_host_pks[offset : (offset + self._batch_size)] del_pks = del_host_pks[offset : (offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks): hosts_to_remove = list(db_hosts.filter(pk__in=del_pks))
group_host_count += 1 if hosts_to_remove:
if db_host not in db_group.hosts.all(): group_host_count += len(hosts_to_remove)
continue db_group.hosts.remove(*hosts_to_remove)
db_group.hosts.remove(db_host) for db_host in hosts_to_remove:
logger.debug('Host "%s" removed from group "%s"', db_host.name, db_group.name) logger.debug('Host "%s" removed from group "%s"', db_host.name, db_group.name)
if settings.SQL_DEBUG: if settings.SQL_DEBUG:
logger.warning( logger.warning(
'group-group and group-host deletions took %d queries for %d relationships', 'group-group and group-host deletions took %d queries for %d relationships',

View File

@@ -531,6 +531,7 @@ class CredentialType(CommonModelNameNotUnique):
existing = ct_class.objects.filter(name=default.name, kind=default.kind).first() existing = ct_class.objects.filter(name=default.name, kind=default.kind).first()
if existing is not None: if existing is not None:
existing.namespace = default.namespace existing.namespace = default.namespace
existing.description = getattr(default, 'description', '')
existing.inputs = {} existing.inputs = {}
existing.injectors = {} existing.injectors = {}
existing.save() existing.save()
@@ -570,7 +571,14 @@ class CredentialType(CommonModelNameNotUnique):
@classmethod @classmethod
def load_plugin(cls, ns, plugin): def load_plugin(cls, ns, plugin):
# TODO: User "side-loaded" credential custom_injectors isn't supported # TODO: User "side-loaded" credential custom_injectors isn't supported
ManagedCredentialType.registry[ns] = SimpleNamespace(namespace=ns, name=plugin.name, kind='external', inputs=plugin.inputs, backend=plugin.backend) ManagedCredentialType.registry[ns] = SimpleNamespace(
namespace=ns,
name=plugin.name,
kind='external',
inputs=plugin.inputs,
backend=plugin.backend,
description=getattr(plugin, 'plugin_description', ''),
)
def inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=None): def inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=None):
from awx_plugins.interfaces._temporary_private_inject_api import inject_credential from awx_plugins.interfaces._temporary_private_inject_api import inject_credential
@@ -582,7 +590,13 @@ class CredentialTypeHelper:
@classmethod @classmethod
def get_creation_params(cls, cred_type): def get_creation_params(cls, cred_type):
if cred_type.kind == 'external': if cred_type.kind == 'external':
return dict(namespace=cred_type.namespace, kind=cred_type.kind, name=cred_type.name, managed=True) return {
'namespace': cred_type.namespace,
'kind': cred_type.kind,
'name': cred_type.name,
'managed': True,
'description': getattr(cred_type, 'description', ''),
}
return dict( return dict(
namespace=cred_type.namespace, namespace=cred_type.namespace,
kind=cred_type.kind, kind=cred_type.kind,

View File

@@ -335,7 +335,9 @@ class WorkflowJobNode(WorkflowNodeBase):
# or labels, because they do not propagate WFJT-->node at all # or labels, because they do not propagate WFJT-->node at all
# Combine WFJT prompts with node here, WFJT at higher level # Combine WFJT prompts with node here, WFJT at higher level
node_prompts_data.update(wj_prompts_data) # Empty string values on the workflow job (e.g. from IaC setting limit: "")
# should not override a node's explicit non-empty prompt value
node_prompts_data.update({k: v for k, v in wj_prompts_data.items() if v != ''})
accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**node_prompts_data) accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**node_prompts_data)
if errors: if errors:
logger.info( logger.info(

View File

@@ -277,6 +277,7 @@ class RunnerCallback:
def artifacts_handler(self, artifact_dir): def artifacts_handler(self, artifact_dir):
success, query_file_contents = try_load_query_file(artifact_dir) success, query_file_contents = try_load_query_file(artifact_dir)
if success: if success:
self.delay_update(event_queries_processed=False)
collections_info = collect_queries(query_file_contents) collections_info = collect_queries(query_file_contents)
for collection, data in collections_info.items(): for collection, data in collections_info.items():
version = data['version'] version = data['version']
@@ -300,24 +301,6 @@ class RunnerCallback:
else: else:
logger.warning(f'The file {COLLECTION_FILENAME} unexpectedly did not contain ansible_version') logger.warning(f'The file {COLLECTION_FILENAME} unexpectedly did not contain ansible_version')
# Write event_queries_processed and installed_collections directly
# to the DB instead of using delay_update. delay_update defers
# writes until the final job status save, but
# events_processed_hook (called from both the task runner after
# the final save and the callback receiver after the wrapup
# event) needs event_queries_processed=False visible in the DB
# to dispatch save_indirect_host_entries. The field defaults to
# True, so without a direct write the hook would see True and
# skip the dispatch. installed_collections is also written
# directly so it is available if the callback receiver
# dispatches before the final save.
from awx.main.models import Job
db_updates = {'event_queries_processed': False}
if 'installed_collections' in query_file_contents:
db_updates['installed_collections'] = query_file_contents['installed_collections']
Job.objects.filter(id=self.instance.id).update(**db_updates)
self.artifacts_processed = True self.artifacts_processed = True

View File

@@ -1682,7 +1682,7 @@ class RunProjectUpdate(BaseTask):
return params return params
def build_credentials_list(self, project_update): def build_credentials_list(self, project_update):
if project_update.scm_type == 'insights' and project_update.credential: if project_update.credential:
return [project_update.credential] return [project_update.credential]
return [] return []

View File

@@ -305,6 +305,47 @@ class TestINIImports:
has_host_group = inventory.groups.get(name='has_a_host') has_host_group = inventory.groups.get(name='has_a_host')
assert has_host_group.hosts.count() == 1 assert has_host_group.hosts.count() == 1
@mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader)
def test_overwrite_removes_stale_memberships(self, inventory):
"""When overwrite is enabled, host-group and group-group memberships
that are no longer in the imported data should be removed."""
# First import: parent_group has two children, host_group has two hosts
inventory_import.AnsibleInventoryLoader._data = {
"_meta": {"hostvars": {"host1": {}, "host2": {}}},
"all": {"children": ["ungrouped", "parent_group", "child_a", "child_b", "host_group"]},
"parent_group": {"children": ["child_a", "child_b"]},
"host_group": {"hosts": ["host1", "host2"]},
"ungrouped": {"hosts": []},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__, overwrite=True)
parent = inventory.groups.get(name='parent_group')
assert set(parent.children.values_list('name', flat=True)) == {'child_a', 'child_b'}
host_grp = inventory.groups.get(name='host_group')
assert set(host_grp.hosts.values_list('name', flat=True)) == {'host1', 'host2'}
# Second import: child_b removed from parent_group, host2 moved out of host_group
inventory_import.AnsibleInventoryLoader._data = {
"_meta": {"hostvars": {"host1": {}, "host2": {}}},
"all": {"children": ["ungrouped", "parent_group", "child_a", "child_b", "host_group"]},
"parent_group": {"children": ["child_a"]},
"host_group": {"hosts": ["host1"]},
"ungrouped": {"hosts": ["host2"]},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__, overwrite=True)
parent.refresh_from_db()
host_grp.refresh_from_db()
# child_b should be removed from parent_group
assert set(parent.children.values_list('name', flat=True)) == {'child_a'}
# host2 should be removed from host_group
assert set(host_grp.hosts.values_list('name', flat=True)) == {'host1'}
# host2 and child_b should still exist in the inventory, just not in those groups
assert inventory.hosts.filter(name='host2').exists()
assert inventory.groups.filter(name='child_b').exists()
@mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader) @mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader)
def test_recursive_group_error(self, inventory): def test_recursive_group_error(self, inventory):
inventory_import.AnsibleInventoryLoader._data = { inventory_import.AnsibleInventoryLoader._data = {

View File

@@ -291,6 +291,33 @@ class TestWorkflowJob:
assert set(data['labels']) == set(node_labels) # as exception, WFJT labels not applied assert set(data['labels']) == set(node_labels) # as exception, WFJT labels not applied
assert data['limit'] == 'wj_limit' assert data['limit'] == 'wj_limit'
def test_node_limit_not_overridden_by_empty_string_wj_limit(self, project, inventory):
"""
When the workflow job has an empty string limit (e.g., set via IaC with limit: ""),
the node-level limit should still be passed to the spawned job, not silently suppressed.
"""
jt = JobTemplate.objects.create(
project=project,
inventory=inventory,
ask_limit_on_launch=True,
)
# Simulate a workflow job whose WFJT was created via IaC with `limit: ""`
# (e.g. awx.awx.workflow_job_template: ... limit: "")
# This stores '' in char_prompts instead of treating it as None/"no limit".
wj = WorkflowJob.objects.create(name='test-wf-job')
wj.limit = '' # stores {'limit': ''} in char_prompts - the IaC bug scenario
wj.save()
node = WorkflowJobNode.objects.create(workflow_job=wj, unified_job_template=jt)
node.limit = 'web_servers'
node.save()
data = node.get_job_kwargs()
# The node-level limit should be applied; the WJ's empty string limit is not meaningful
assert data.get('limit') == 'web_servers', (
"Node-level limit 'web_servers' was not passed to the job. " "Likely caused by an empty string WJ limit overriding the node limit"
)
@pytest.mark.django_db @pytest.mark.django_db
class TestWorkflowJobTemplate: class TestWorkflowJobTemplate:

View File

@@ -2,7 +2,11 @@
import pytest import pytest
from types import SimpleNamespace
from unittest import mock
from awx.main.models import Credential, CredentialType from awx.main.models import Credential, CredentialType
from awx.main.models.credential import CredentialTypeHelper, ManagedCredentialType
from django.apps import apps from django.apps import apps
@@ -78,3 +82,53 @@ def test_credential_context_property_independent_instances():
assert cred1.context == {'key1': 'value1'} assert cred1.context == {'key1': 'value1'}
assert cred2.context == {'key2': 'value2'} assert cred2.context == {'key2': 'value2'}
assert cred1.context is not cred2.context assert cred1.context is not cred2.context
def test_load_plugin_passes_description():
plugin = SimpleNamespace(name='test_plugin', inputs={'fields': []}, backend=None, plugin_description='A test plugin')
CredentialType.load_plugin('test_ns', plugin)
entry = ManagedCredentialType.registry['test_ns']
assert entry.description == 'A test plugin'
del ManagedCredentialType.registry['test_ns']
def test_load_plugin_missing_description():
plugin = SimpleNamespace(name='test_plugin', inputs={'fields': []}, backend=None)
CredentialType.load_plugin('test_ns', plugin)
entry = ManagedCredentialType.registry['test_ns']
assert entry.description == ''
del ManagedCredentialType.registry['test_ns']
def test_get_creation_params_external_includes_description():
cred_type = SimpleNamespace(namespace='test_ns', kind='external', name='Test', description='My description')
params = CredentialTypeHelper.get_creation_params(cred_type)
assert params['description'] == 'My description'
def test_get_creation_params_external_missing_description():
cred_type = SimpleNamespace(namespace='test_ns', kind='external', name='Test')
params = CredentialTypeHelper.get_creation_params(cred_type)
assert params['description'] == ''
@pytest.mark.django_db
def test_setup_tower_managed_defaults_updates_description():
registry_entry = SimpleNamespace(
namespace='test_ns',
kind='external',
name='Test Plugin',
inputs={'fields': []},
backend=None,
description='Updated description',
)
# Create an existing credential type with no description
ct = CredentialType.objects.create(name='Test Plugin', kind='external', namespace='old_ns')
assert ct.description == ''
with mock.patch.dict(ManagedCredentialType.registry, {'test_ns': registry_entry}, clear=True):
CredentialType._setup_tower_managed_defaults()
ct.refresh_from_db()
assert ct.description == 'Updated description'
assert ct.namespace == 'test_ns'

View File

@@ -1,5 +1,5 @@
[build-system] [build-system]
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"] requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2,<10"]
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
# Do not uncomment the line below. We need to be able to override the version via a file, and this # Do not uncomment the line below. We need to be able to override the version via a file, and this

View File

@@ -116,7 +116,7 @@ cython==3.1.3
# via -r /awx_devel/requirements/requirements.in # via -r /awx_devel/requirements/requirements.in
daphne==4.2.1 daphne==4.2.1
# via -r /awx_devel/requirements/requirements.in # via -r /awx_devel/requirements/requirements.in
dispatcherd[pg-notify]==2026.02.26 dispatcherd[pg-notify]==2026.3.25
# via -r /awx_devel/requirements/requirements.in # via -r /awx_devel/requirements/requirements.in
distro==1.9.0 distro==1.9.0
# via -r /awx_devel/requirements/requirements.in # via -r /awx_devel/requirements/requirements.in