[4.6] Backport the "live" tests (#6859)

* Create a new pytest folder for live system testing with normal services (#15688)

* PoC for running dev env tests

* Replace in github actions

* Move folder to better location

* Further streamlining of new test folders

* Consolidate fixture, add writeup docs

* Use star import

* Push the wait-for-job to the conftest

Fix misused project cache identifier (#15690)

Fix project cache identifiers for new updates

Finish test and discover viable solution

Add comment on related task code

AAP-37989 Tests for exclude list with multiple jobs (#15722)

* Tests for exclude list with multiple jobs

Create test for using manual & file projects (#15754)

* Create test for using a manual project

* Change default project factory to git, remove project files monkeypatch

* skip update of factory project

* Initial file scaffolding for feature

* Fill in galaxy and names

* Add README, describe project folders and dependencies

Add ee cleanup tests

* Adds cleanup tests to the live test.

Fix rsyslog permission error in github ubuntu tests from apparmor (#15717)

* Add test to detect rsyslog config problems

* Get dmesg output

* Disable apparmor for rsyslogd

Make awx/main/tests/live dramatically faster (#15780)

* Make awx/main/tests/live dramatically faster

* Add new setting to exclude list

* Fix rebase issues

* Did not want to backport this
This commit is contained in:
Alan Rominger 2025-02-25 15:22:38 -05:00 committed by GitHub
parent ba053dfb51
commit 529ee73fcd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 444 additions and 144 deletions

View File

@ -23,6 +23,14 @@ outputs:
runs:
using: composite
steps:
- name: Disable apparmor for rsyslogd, first step
shell: bash
run: sudo ln -s /etc/apparmor.d/usr.sbin.rsyslogd /etc/apparmor.d/disable/
- name: Disable apparmor for rsyslogd, second step
shell: bash
run: sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.rsyslogd
- name: Build awx_devel image for running checks
uses: ./.github/actions/awx_devel_image
with:

View File

@ -70,8 +70,8 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Run smoke test
run: ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
- name: Run live dev env tests
run: docker exec tools_awx_1 /bin/bash -c "make live_test"
awx-operator:
runs-on: ubuntu-latest

View File

@ -353,6 +353,9 @@ test:
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
live_test:
cd awx/main/tests/live && py.test tests/
test_migrations:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \

View File

@ -5,9 +5,9 @@ import time
from uuid import uuid4
from django_guid import get_guid
from django.conf import settings
from . import pg_bus_conn
from awx.main.utils import is_testing
logger = logging.getLogger('awx.main.dispatch')
@ -101,7 +101,7 @@ class task:
obj = cls.get_async_body(args=args, kwargs=kwargs, uuid=uuid, **kw)
if callable(queue):
queue = queue()
if not is_testing():
if not settings.DISPATCHER_MOCK_PUBLISH:
with pg_bus_conn() as conn:
conn.notify(queue, json.dumps(obj))
return (obj, queue)

View File

@ -226,22 +226,24 @@ class RemoteJobError(RuntimeError):
pass
def run_until_complete(node, timing_data=None, **kwargs):
def run_until_complete(node, timing_data=None, worktype='ansible-runner', ttl='20s', **kwargs):
"""
Runs an ansible-runner work_type on remote node, waits until it completes, then returns stdout.
"""
config_data = read_receptor_config()
receptor_ctl = get_receptor_ctl(config_data)
use_stream_tls = getattr(get_conn_type(node, receptor_ctl), 'name', None) == "STREAMTLS"
kwargs.setdefault('tlsclient', get_tls_client(config_data, use_stream_tls))
kwargs.setdefault('ttl', '20s')
if ttl is not None:
kwargs['ttl'] = ttl
kwargs.setdefault('payload', '')
if work_signing_enabled(config_data):
kwargs['signwork'] = True
transmit_start = time.time()
result = receptor_ctl.submit_work(worktype='ansible-runner', node=node, **kwargs)
result = receptor_ctl.submit_work(worktype=worktype, node=node, **kwargs)
unit_id = result['unitid']
run_start = time.time()
@ -369,7 +371,7 @@ def _convert_args_to_cli(vargs):
return args
def worker_cleanup(node_name, vargs, timeout=300.0):
def worker_cleanup(node_name, vargs):
args = _convert_args_to_cli(vargs)
remote_command = ' '.join(args)

View File

@ -25,6 +25,7 @@ from django.utils.translation import gettext_lazy as _
from django.utils.translation import gettext_noop
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.query import QuerySet
# Django-CRUM
from crum import impersonate
@ -396,48 +397,68 @@ def purge_old_stdout_files():
logger.debug("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT, f)))
def _cleanup_images_and_files(**kwargs):
if settings.IS_K8S:
return
this_inst = Instance.objects.me()
runner_cleanup_kwargs = this_inst.get_cleanup_task_kwargs(**kwargs)
if runner_cleanup_kwargs:
stdout = ''
with StringIO() as buffer:
with redirect_stdout(buffer):
ansible_runner.cleanup.run_cleanup(runner_cleanup_kwargs)
stdout = buffer.getvalue()
if '(changed: True)' in stdout:
logger.info(f'Performed local cleanup with kwargs {kwargs}, output:\n{stdout}')
class CleanupImagesAndFiles:
@classmethod
def get_first_control_instance(cls) -> Instance | None:
return (
Instance.objects.filter(node_type__in=['hybrid', 'control'], node_state=Instance.States.READY, enabled=True, capacity__gt=0)
.order_by('-hostname')
.first()
)
# if we are the first instance alphabetically, then run cleanup on execution nodes
checker_instance = (
Instance.objects.filter(node_type__in=['hybrid', 'control'], node_state=Instance.States.READY, enabled=True, capacity__gt=0)
.order_by('-hostname')
.first()
)
if checker_instance and this_inst.hostname == checker_instance.hostname:
for inst in Instance.objects.filter(node_type='execution', node_state=Instance.States.READY, enabled=True, capacity__gt=0):
runner_cleanup_kwargs = inst.get_cleanup_task_kwargs(**kwargs)
if not runner_cleanup_kwargs:
continue
try:
stdout = worker_cleanup(inst.hostname, runner_cleanup_kwargs)
if '(changed: True)' in stdout:
logger.info(f'Performed cleanup on execution node {inst.hostname} with output:\n{stdout}')
except RuntimeError:
logger.exception(f'Error running cleanup on execution node {inst.hostname}')
@classmethod
def get_execution_instances(cls) -> QuerySet[Instance]:
return Instance.objects.filter(node_type='execution', node_state=Instance.States.READY, enabled=True, capacity__gt=0)
@classmethod
def run_local(cls, this_inst: Instance, **kwargs):
if settings.IS_K8S:
return
runner_cleanup_kwargs = this_inst.get_cleanup_task_kwargs(**kwargs)
if runner_cleanup_kwargs:
stdout = ''
with StringIO() as buffer:
with redirect_stdout(buffer):
ansible_runner.cleanup.run_cleanup(runner_cleanup_kwargs)
stdout = buffer.getvalue()
if '(changed: True)' in stdout:
logger.info(f'Performed local cleanup with kwargs {kwargs}, output:\n{stdout}')
@classmethod
def run_remote(cls, this_inst: Instance, **kwargs):
# if we are the first instance alphabetically, then run cleanup on execution nodes
checker_instance = cls.get_first_control_instance()
if checker_instance and this_inst.hostname == checker_instance.hostname:
for inst in cls.get_execution_instances():
runner_cleanup_kwargs = inst.get_cleanup_task_kwargs(**kwargs)
if not runner_cleanup_kwargs:
continue
try:
stdout = worker_cleanup(inst.hostname, runner_cleanup_kwargs)
if '(changed: True)' in stdout:
logger.info(f'Performed cleanup on execution node {inst.hostname} with output:\n{stdout}')
except RuntimeError:
logger.exception(f'Error running cleanup on execution node {inst.hostname}')
@classmethod
def run(cls, **kwargs):
if settings.IS_K8S:
return
this_inst = Instance.objects.me()
cls.run_local(this_inst, **kwargs)
cls.run_remote(this_inst, **kwargs)
@task(queue='tower_broadcast_all')
def handle_removed_image(remove_images=None):
"""Special broadcast invocation of this method to handle case of deleted EE"""
_cleanup_images_and_files(remove_images=remove_images, file_pattern='')
CleanupImagesAndFiles.run(remove_images=remove_images, file_pattern='')
@task(queue=get_task_queuename)
def cleanup_images_and_files():
_cleanup_images_and_files(image_prune=True)
CleanupImagesAndFiles.run(image_prune=True)
@task(queue=get_task_queuename)

42
awx/main/tests/README.md Normal file
View File

@ -0,0 +1,42 @@
## Test Environments
Several of the subfolders of `awx/main/tests/` indicate a different required _environment_
where you can run the tests. Those folders are:
- `functional/` - requires a test database and no other services running
- `live/` - must run in `tools_awx_1` container launched by `make docker-compose`
- `unit/` - does not require a test database or any active services
### Functional and unit test environment
The functional and unit tests have an invocation in `make test`,
and this attaches several other things like schema that piggybacks on requests.
These tests are run from the root AWX folder.
#### Functional tests
Only tests in the `functional/` folder should use the `@pytest.mark.django_db` decorator.
This is the only difference between the functional and unit folders,
the test environment is otherwise the same for both.
Functional tests use a sqlite3 database, so the postgres service is not necessary.
### Live tests
The live tests have an invocation in `make live_test` which will change
directory before running, which is required to pick up a different pytest
configuration.
This will use the postgres container from `make docker-compose` for the database,
and will disable the pytest-django features of running with a test database
and running tests in transactions.
This means that any changes done in the course of the test could potentially
be seen in your browser via the API or UI, and anything the test fails
to clean up will remain in the database.
### Folders that should not contain tests
- `data/` - just files other tests use
- `docs/` - utilities for schema generation
- `factories/` - general utilities
- `manual/` - python files to be run directly

View File

@ -216,6 +216,16 @@ def mock_get_event_queryset_no_job_created():
@pytest.fixture
def mock_me():
    "Allows Instance.objects.me() to work without touching the database"
    # Patch the manager-level me() lookup to return a fixed in-memory Instance
    # matching this cluster's host id — no database row is created.
    me_mock = mock.MagicMock(return_value=Instance(id=1, hostname=settings.CLUSTER_HOST_ID, uuid='00000000-0000-0000-0000-000000000000'))
    with mock.patch.object(Instance.objects, 'me', me_mock):
        yield


@pytest.fixture
def me_inst():
    "Inserts an instance to the database for Instance.objects.me(), and goes ahead and mocks it in"
    # Unlike mock_me, this persists a real row so tests can create related
    # objects (e.g. Jobs with controller_node=inst.hostname) against it.
    inst = Instance.objects.create(hostname='local_node', uuid='00000000-0000-0000-0000-000000000000')
    me_mock = mock.MagicMock(return_value=inst)
    with mock.patch.object(Instance.objects, 'me', me_mock):
        yield inst

View File

@ -0,0 +1,41 @@
# Project data for live tests
Each folder in this directory is usable as source for a project or role or collection,
which is used in tests, particularly the "awx/main/tests/live" tests.
Although these are not git repositories, test fixtures will make copies,
and in the copied folders, run `git init` type commands, turning them into
git repos. This is done in the locations
- `/var/lib/awx/projects`
- `/tmp/live_tests`
These can then be referenced for manual projects or git via the `file://` protocol.
## debug
This is the simplest possible case with 1 playbook with 1 debug task.
## with_requirements
This has a playbook that runs a task that uses a role.
The role project is referenced in the `roles/requirements.yml` file.
### role_requirement
This is the source for the role that the `with_requirements` project uses.
## test_host_query
This has a playbook that runs a task from a custom collection module which
is registered for the host query feature.
The collection is referenced in its `collections/requirements.yml` file.
### host_query
This can act as source code for a collection that enables host/event querying.
It has a `meta/event_query.yml` file, which may provide you an example of how
to implement this in your own collection.

View File

@ -0,0 +1,6 @@
---
- hosts: all
gather_facts: false
connection: local
tasks:
- debug: msg='hello'

View File

@ -0,0 +1,19 @@
---
authors:
- AWX Project Contributors <awx-project@googlegroups.com>
dependencies: {}
description: Indirect host counting example repo. Not for use in production.
documentation: https://github.com/ansible/awx
homepage: https://github.com/ansible/awx
issues: https://github.com/ansible/awx
license:
- GPL-3.0-or-later
name: query
namespace: demo
readme: README.md
repository: https://github.com/ansible/awx
tags:
- demo
- testing
- host_counting
version: 0.0.1

View File

@ -0,0 +1,4 @@
---
{
"demo.query.example": ""
}

View File

@ -0,0 +1,19 @@
---
galaxy_info:
author: "For Test"
company: AWX
license: MIT
min_ansible_version: 1.4
platforms:
- name: EL
versions:
- 8
- 9
- name: Fedora
versions:
- 39
- 40
- 41
categories:
- stuff
dependencies: []

View File

@ -0,0 +1,4 @@
---
- name: debug variable
debug:
msg: "1234567890"

View File

@ -0,0 +1,5 @@
---
collections:
- name: 'file:///tmp/live_tests/host_query'
type: git
version: devel

View File

@ -0,0 +1,8 @@
---
- hosts: all
gather_facts: false
connection: local
tasks:
- demo.query.example:
register: result
- debug: var=result

View File

@ -0,0 +1,3 @@
---
- name: role_requirement
src: git+file:///tmp/live_tests/role_requirement

View File

@ -0,0 +1,7 @@
---
- hosts: all
connection: local
gather_facts: false
tasks:
- include_role:
name: role_requirement

View File

@ -99,11 +99,19 @@ def mk_user(name, is_superuser=False, organization=None, team=None, persisted=Tr
def mk_project(name, organization=None, description=None, persisted=True):
description = description or '{}-description'.format(name)
project = Project(name=name, description=description, playbook_files=['helloworld.yml', 'alt-helloworld.yml'])
project = Project(
name=name,
description=description,
playbook_files=['helloworld.yml', 'alt-helloworld.yml'],
scm_type='git',
scm_url='https://foo.invalid',
scm_revision='1234567890123456789012345678901234567890',
scm_update_on_launch=False,
)
if organization is not None:
project.organization = organization
if persisted:
project.save()
project.save(skip_update=True)
return project

View File

@ -18,7 +18,7 @@ class TestUnifiedOrganization:
def data_for_model(self, model, orm_style=False):
data = {'name': 'foo', 'organization': None}
if model == 'JobTemplate':
proj = models.Project.objects.create(name="test-proj", playbook_files=['helloworld.yml'])
proj = models.Project.objects.create(name="test-proj", playbook_files=['helloworld.yml'], scm_type='git', scm_url='https://foo.invalid')
if orm_style:
data['project_id'] = proj.id
else:

View File

@ -115,20 +115,6 @@ def team_member(user, team):
return ret
@pytest.fixture(scope="session", autouse=True)
def project_playbooks():
"""
Return playbook_files as playbooks for manual projects when testing.
"""
class PlaybooksMock(mock.PropertyMock):
def __get__(self, obj, obj_type):
return obj.playbook_files
mocked = mock.patch.object(Project, 'playbooks', new_callable=PlaybooksMock)
mocked.start()
@pytest.fixture
def run_computed_fields_right_away(request):
def run_me(inventory_id):

View File

@ -335,7 +335,7 @@ def test_team_project_list(get, team_project_list):
@pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)])
@pytest.mark.django_db()
@pytest.mark.django_db
def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code):
if u == 'rando':
u = rando
@ -353,11 +353,12 @@ def test_create_project(post, organization, org_admin, org_member, admin, rando,
'organization': organization.id,
},
u,
expect=expected_status_code,
)
print(result.data)
assert result.status_code == expected_status_code
if expected_status_code == 201:
assert Project.objects.filter(name='Project', organization=organization).exists()
elif expected_status_code == 403:
assert 'do not have permission' in str(result.data['detail'])
@pytest.mark.django_db

View File

@ -1,11 +1,10 @@
import pytest
from unittest import mock
import os
import tempfile
import shutil
from awx.main.tasks.jobs import RunJob
from awx.main.tasks.system import execution_node_health_check, _cleanup_images_and_files
from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check
from awx.main.models import Instance, Job
@ -27,39 +26,61 @@ def test_no_worker_info_on_AWX_nodes(node_type):
@pytest.fixture
def mock_job_folder(request):
pdd_path = tempfile.mkdtemp(prefix='awx_123_')
def job_folder_factory(request):
def _rf(job_id='1234'):
pdd_path = tempfile.mkdtemp(prefix=f'awx_{job_id}_')
def test_folder_cleanup():
if os.path.exists(pdd_path):
shutil.rmtree(pdd_path)
def test_folder_cleanup():
if os.path.exists(pdd_path):
shutil.rmtree(pdd_path)
request.addfinalizer(test_folder_cleanup)
request.addfinalizer(test_folder_cleanup)
return pdd_path
return pdd_path
return _rf
@pytest.fixture
def mock_job_folder(job_folder_factory):
return job_folder_factory()
@pytest.mark.django_db
def test_folder_cleanup_stale_file(mock_job_folder, mock_me):
_cleanup_images_and_files()
CleanupImagesAndFiles.run()
assert os.path.exists(mock_job_folder) # grace period should protect folder from deletion
_cleanup_images_and_files(grace_period=0)
CleanupImagesAndFiles.run(grace_period=0)
assert not os.path.exists(mock_job_folder) # should be deleted
@pytest.mark.django_db
def test_folder_cleanup_running_job(mock_job_folder, mock_me):
me_inst = Instance.objects.create(hostname='local_node', uuid='00000000-0000-0000-0000-000000000000')
with mock.patch.object(Instance.objects, 'me', return_value=me_inst):
job = Job.objects.create(id=123, controller_node=me_inst.hostname, status='running')
_cleanup_images_and_files(grace_period=0)
assert os.path.exists(mock_job_folder) # running job should prevent folder from getting deleted
def test_folder_cleanup_running_job(mock_job_folder, me_inst):
job = Job.objects.create(id=1234, controller_node=me_inst.hostname, status='running')
CleanupImagesAndFiles.run(grace_period=0)
assert os.path.exists(mock_job_folder) # running job should prevent folder from getting deleted
job.status = 'failed'
job.save(update_fields=['status'])
_cleanup_images_and_files(grace_period=0)
assert not os.path.exists(mock_job_folder) # job is finished and no grace period, should delete
job.status = 'failed'
job.save(update_fields=['status'])
CleanupImagesAndFiles.run(grace_period=0)
assert not os.path.exists(mock_job_folder) # job is finished and no grace period, should delete
@pytest.mark.django_db
def test_folder_cleanup_multiple_running_jobs(job_folder_factory, me_inst):
    """Folders belonging to currently-running jobs must survive cleanup,
    even with a zero grace period."""
    jobs = []
    dirs = []
    num_jobs = 3
    for i in range(num_jobs):
        # Each running job gets a matching awx_<id>_* private data dir
        job = Job.objects.create(controller_node=me_inst.hostname, status='running')
        dirs.append(job_folder_factory(job.id))
        jobs.append(job)

    CleanupImagesAndFiles.run(grace_period=0)

    # Every dir must still exist because its job is still running
    assert [os.path.exists(d) for d in dirs] == [True for i in range(num_jobs)]
@pytest.mark.django_db

View File

@ -0,0 +1,3 @@
# This file is needed to undo the pytest settings from the project root
[pytest]
addopts = -p no:django -p awx.main.tests.live.pytest_django_config

View File

@ -0,0 +1,12 @@
import django

from awx import prepare_env


def pytest_load_initial_conftests(args):
    """Replacement for the same-named hook in the pytest_django plugin.

    Instead of setting up a test database, this sets up Django normally,
    giving tests access to the live postgres database as-is, for better and worse.
    """
    prepare_env()
    django.setup()

View File

@ -13,7 +13,6 @@ from awx.api.versioning import reverse
# These tests are invoked from the awx/main/tests/live/ subfolder
# so any fixtures from higher-up conftest files must be explicitly included
from awx.main.tests.functional.conftest import * # noqa
from awx.main.tests.conftest import load_all_credentials # noqa: F401; pylint: disable=unused-import
from awx.main.tests import data
from awx.main.models import Project, JobTemplate, Organization, Inventory

View File

@ -0,0 +1,2 @@
def test_git_file_project(live_tmp_folder, run_job_from_playbook):
    # Run a job from a git-type project whose scm_url points at a local
    # file:// checkout (the 'debug' test project) instead of a remote repo
    run_job_from_playbook('test_git_file_project', 'debug.yml', scm_url=f'file://{live_tmp_folder}/debug')

View File

@ -0,0 +1,2 @@
def test_manual_project(copy_project_folders, run_job_from_playbook):
    # Run a job from a manual (non-SCM) project, using the 'debug' folder
    # that copy_project_folders places under the local projects directory
    run_job_from_playbook('test_manual_project', 'debug.yml', local_path='debug')

View File

@ -5,9 +5,9 @@ import pytest
from django.conf import settings
from awx.main.tests.live.tests.conftest import wait_for_job
from awx.main.tests.live.tests.conftest import wait_for_job, wait_for_events
from awx.main.models import Project, SystemJobTemplate
from awx.main.models import Project, SystemJobTemplate, Job
@pytest.fixture(scope='session')
@ -54,3 +54,11 @@ def test_cache_is_populated_after_cleanup_job(project_with_requirements):
# Now, we still have a populated cache
assert project_cache_is_populated(project_with_requirements)
def test_git_file_collection_requirement(live_tmp_folder, copy_project_folders, run_job_from_playbook):
    # this behaves differently, as use_requirements.yml references only the folder, does not include the github name
    run_job_from_playbook('test_git_file_collection_requirement', 'use_requirement.yml', scm_url=f'file://{live_tmp_folder}/with_requirements')
    # Look up the job the helper just launched (most recent by created time)
    job = Job.objects.filter(name__icontains='test_git_file_collection_requirement').order_by('-created').first()
    # Job events are written asynchronously; wait for them before inspecting stdout
    wait_for_events(job)
    # The role's debug task prints this marker, proving the requirement was installed and the role ran
    assert '1234567890' in job.job_events.filter(task='debug variable', event='runner_on_ok').first().stdout

View File

@ -0,0 +1,82 @@
import os
import json
import pytest
import tempfile
import subprocess
from unittest import mock
from awx.main.tasks.receptor import _convert_args_to_cli, run_until_complete
from awx.main.tasks.system import CleanupImagesAndFiles
from awx.main.models import Instance, JobTemplate
def get_podman_images():
    """Return metadata for all local podman images, parsed from `podman images` JSON output."""
    completed = subprocess.run(
        ['podman', 'images', '--format', 'json'],
        capture_output=True,
        text=True,
        check=True,
    )
    return json.loads(completed.stdout)
def test_folder_cleanup_multiple_running_jobs_execution_node(request):
    """Private data dirs of running jobs must survive the execution-node cleanup.

    Creates running jobs with matching awx_<id>_* temp folders, runs the
    ansible-runner worker cleanup with those folders excluded, and asserts
    none of them were removed.
    """
    demo_jt = JobTemplate.objects.get(name='Demo Job Template')
    jobs = [demo_jt.create_unified_job(_eager_fields={'status': 'running'}) for i in range(3)]

    def delete_jobs():
        # Remove the jobs created above so they do not leak into other live tests
        for job in jobs:
            job.delete()

    request.addfinalizer(delete_jobs)

    job_dirs = []
    job_patterns = []
    for job in jobs:
        job_pattern = f'awx_{job.id}_1234'
        job_dir = os.path.join(tempfile.gettempdir(), job_pattern)
        job_patterns.append(job_pattern)
        job_dirs.append(job_dir)
        os.mkdir(job_dir)

    inst = Instance.objects.me()
    runner_cleanup_kwargs = inst.get_cleanup_task_kwargs(exclude_strings=job_patterns, grace_period=0)

    # We can not call worker_cleanup directly because execution and control nodes are not fungible
    args = _convert_args_to_cli(runner_cleanup_kwargs)
    remote_command = ' '.join(args)
    subprocess.call('ansible-runner worker ' + remote_command, shell=True)
    print('ansible-runner worker ' + remote_command)

    # All excluded folders must still exist after the cleanup ran
    assert [os.path.exists(job_dir) for job_dir in job_dirs] == [True for i in range(3)]
@pytest.mark.parametrize(
    'worktype',
    ('remote', 'local'),
)
def test_tagless_image(podman_image_generator, worktype: str):
    """
    Ensure podman images on Control and Hybrid nodes are deleted during cleanup.
    """
    # Build a fresh untagged image so there is something dangling to clean up
    podman_image_generator()

    dangling_image = next((image for image in get_podman_images() if image.get('Dangling', False)), None)
    assert dangling_image

    instance_me = Instance.objects.me()

    match worktype:
        case 'local':
            CleanupImagesAndFiles.run_local(instance_me, image_prune=True)
        case 'remote':
            # Exercise the remote path on a single node: force receptor work to
            # execute locally (worktype='local', no ttl) and report this node as
            # the only execution instance for cleanup targeting.
            with (
                mock.patch(
                    'awx.main.tasks.receptor.run_until_complete', lambda *args, **kwargs: run_until_complete(*args, worktype='local', ttl=None, **kwargs)
                ),
                mock.patch('awx.main.tasks.system.CleanupImagesAndFiles.get_execution_instances', lambda: [Instance.objects.me()]),
            ):
                CleanupImagesAndFiles.run_remote(instance_me, image_prune=True)
        case _:
            raise ValueError(f'worktype "{worktype}" not supported.')

    # The dangling image must be gone after cleanup
    for image in get_podman_images():
        assert image['Id'] != dangling_image['Id']

View File

@ -0,0 +1,15 @@
from awx.api.versioning import reverse
from awx.main.models import JobTemplate, Job
from awx.main.tests.live.tests.conftest import wait_for_job
def test_launch_demo_jt(post, admin):
    # Smoke test: launching the preloaded Demo Job Template through the API
    # should return 201 and the resulting job should run to completion
    jt = JobTemplate.objects.get(name='Demo Job Template')
    url = reverse('api:job_template_launch', kwargs={'pk': jt.id})
    r = post(url=url, data={}, user=admin, expect=201)
    job = Job.objects.get(pk=r.data['id'])
    wait_for_job(job)

View File

@ -0,0 +1,10 @@
import os
# Path to the rsyslog configuration file AWX writes for log forwarding
RSYSLOG_CONFIG = '/var/lib/awx/rsyslog/rsyslog.conf'


def test_rsyslog_config_readable():
    # Regression test for apparmor breaking rsyslog in CI: the config must be
    # readable by this process and be a regular file with mode 0640
    # (0o100640 == S_IFREG | 0o640)
    with open(RSYSLOG_CONFIG, 'r') as f:
        content = f.read()
    assert '/var/lib/awx/rsyslog' in content
    assert oct(os.stat(RSYSLOG_CONFIG).st_mode) == '0o100640'

View File

@ -7,6 +7,9 @@ from awx.settings.development import * # NOQA
# Some things make decisions based on settings.SETTINGS_MODULE, so this is done for that
SETTINGS_MODULE = 'awx.settings.development'
# Turn off task submission, because sqlite3 does not have pg_notify
DISPATCHER_MOCK_PUBLISH = True
# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.

View File

@ -11,6 +11,7 @@ LOCAL_SETTINGS = (
'CACHES',
'DEBUG',
'NAMED_URL_GRAPH',
'DISPATCHER_MOCK_PUBLISH',
)

View File

@ -483,6 +483,11 @@ EXECUTION_NODE_REMEDIATION_CHECKS = 60 * 30 # once every 30 minutes check if an
# Amount of time dispatcher will try to reconnect to database for jobs and consuming new work
DISPATCHER_DB_DOWNTIME_TOLERANCE = 40
# If you set this, nothing will ever be sent to pg_notify
# This is not practical for real use, although periodic schedules may still run sluggish but functional tasks
# sqlite3 based tests will use this
DISPATCHER_MOCK_PUBLISH = False
BROKER_URL = 'unix:///var/run/redis/redis.sock'
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {'task': 'awx.main.tasks.system.awx_periodic_scheduler', 'schedule': timedelta(seconds=30), 'options': {'expires': 20}},

View File

@ -1,60 +0,0 @@
---
#
# This is used by a CI check in GitHub Actions and isnt really
# meant to be run locally.
#
# The development environment does some unfortunate things to
# make rootless podman work inside of a docker container.
# The goal here is to essentially tests that the awx user is
# able to run `podman run`.
#
- name: Test that the development environment is able to launch a job
hosts: localhost
tasks:
- name: Reset admin password
shell: |
docker exec -i tools_awx_1 bash <<EOSH
awx-manage update_password --username=admin --password=password
awx-manage create_preload_data
EOSH
- block:
- name: Launch Demo Job Template
awx.awx.job_launch:
name: Demo Job Template
wait: yes
validate_certs: no
controller_host: "http://localhost:8013"
controller_username: "admin"
controller_password: "password"
rescue:
- name: Get list of project updates and jobs
uri:
url: "http://localhost:8013/api/v2/{{ resource }}/"
user: admin
password: "password"
force_basic_auth: yes
register: job_lists
loop:
- project_updates
- jobs
loop_control:
loop_var: resource
- name: Get all job and project details
uri:
url: "http://localhost:8013{{ endpoint }}"
user: admin
password: "password"
force_basic_auth: yes
loop: |
{{ job_lists.results | map(attribute='json') | map(attribute='results') | flatten | map(attribute='url') }}
loop_control:
loop_var: endpoint
- name: Re-emit failure
vars:
failed_task:
result: '{{ ansible_failed_result }}'
fail:
msg: '{{ failed_task }}'