Merge branch 'master' into licenses-unstable
commit 682a804a55
Makefile: 20 lines changed
@@ -244,14 +244,22 @@ socketservice:
 factcacher:
 	$(PYTHON) manage.py run_fact_cache_receiver
 
-pep8:
-	pep8 -r awx/
+reports:
+	mkdir -p $@
 
-pyflakes:
-	pyflakes awx/
+pep8: reports
+	@(set -o pipefail && $@ | tee reports/$@.report)
 
-check:
-	flake8
+flake8: reports
+	@$@ --output-file=reports/$@.report
+
+pyflakes: reports
+	@(set -o pipefail && $@ | tee reports/$@.report)
+
+pylint: reports
+	@(set -o pipefail && $@ | reports/$@.report)
+
+check: flake8 pep8 # pyflakes pylint
 
 # Run all API unit tests.
 test:
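A note on the new report-writing targets: in each recipe `$@` expands to the target's own name, so `pep8: reports` runs the pep8 tool and tees its output into reports/pep8.report, and `set -o pipefail` makes the pipeline exit with the linter's status rather than tee's, so `make check` still fails when violations are found. The pylint recipe appears to omit the `tee`. A minimal Python sketch (not part of the commit, and assuming bash and pep8 are installed) of why pipefail matters here:

    # Without pipefail, a failing linter piped through tee would exit 0
    # and "make check" would pass even with violations.
    import subprocess

    cmd = 'set -o pipefail && pep8 awx/ | tee reports/pep8.report'
    rc = subprocess.call(['bash', '-c', cmd])
    print(rc)  # non-zero when pep8 reports violations, despite the pipe through tee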
@@ -108,7 +108,7 @@ class ModelAccessPermission(permissions.BasePermission):
             raise PermissionDenied('your account is inactive')
 
         # Always allow superusers (as long as they are active).
-        if request.user.is_superuser:
+        if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser:
             return True
 
         # Check if view supports the request method before checking permission
@@ -8,11 +8,15 @@ job template.
 
 For example, using curl:
 
-    curl --data-urlencode host_config_key=HOST_CONFIG_KEY http://server/api/v1/job_templates/N/callback/
+    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY"}' http://server/api/v1/job_templates/N/callback/
 
 Or using wget:
 
-    wget -O /dev/null --post-data="host_config_key=HOST_CONFIG_KEY" http://server/api/v1/job_templates/N/callback/
+    wget -O /dev/null --post-data='{"host_config_key": "HOST_CONFIG_KEY"}' --header=Content-Type:application/json http://server/api/v1/job_templates/N/callback/
+
+You may also pass `extra_vars` to the callback:
+
+    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY", "extra_vars": {"key": "value"}}' http://server/api/v1/job_templates/N/callback/
 
 The response will return status 202 if the request is valid, 403 for an
 invalid host config key, or 400 if the host cannot be determined from the
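The same JSON callback can be exercised from Python; a sketch using the requests library, where the server URL, template ID, and key are placeholders exactly as in the docs above:

    import json
    import requests

    resp = requests.post(
        'http://server/api/v1/job_templates/1/callback/',
        headers={'Content-Type': 'application/json'},
        data=json.dumps({'host_config_key': 'HOST_CONFIG_KEY',
                         'extra_vars': {'key': 'value'}}),
    )
    print(resp.status_code)  # 202 accepted; 403 bad key; 400 host not matched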
@@ -1914,6 +1914,9 @@ class JobTemplateCallback(GenericAPIView):
         return Response(data)
 
     def post(self, request, *args, **kwargs):
+        extra_vars = None
+        if request.content_type == "application/json":
+            extra_vars = request.DATA.get("extra_vars", None)
         # Permission class should have already validated host_config_key.
         job_template = self.get_object()
         # Attempt to find matching hosts based on remote address.
@@ -1968,8 +1971,10 @@ class JobTemplateCallback(GenericAPIView):
         job = job_template.create_job(limit=limit, launch_type='callback')
 
         # Send a signal to celery that the job should be started.
-        isau = inventory_sources_already_updated
-        result = job.signal_start(inventory_sources_already_updated=isau)
+        kv = {"inventory_sources_already_updated": inventory_sources_already_updated}
+        if extra_vars is not None:
+            kv['extra_vars'] = extra_vars
+        result = job.signal_start(**kv)
         if not result:
             data = dict(msg='Error starting job!')
             return Response(data, status=status.HTTP_400_BAD_REQUEST)
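Building the kwargs dict instead of always passing extra_vars keeps the signal_start() call unchanged for form-encoded callers that send no extra variables. A standalone illustration of the pattern, with a dummy function standing in for the AWX method:

    def signal_start(**kwargs):
        return kwargs  # stand-in for UnifiedJob.signal_start

    extra_vars = None  # None when the POST body carried no extra_vars
    kv = {'inventory_sources_already_updated': [1, 2]}
    if extra_vars is not None:
        kv['extra_vars'] = extra_vars
    print(signal_start(**kv))  # extra_vars key absent rather than None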
@@ -2460,6 +2465,7 @@ class AdHocCommandList(ListCreateAPIView):
     model = AdHocCommand
     serializer_class = AdHocCommandListSerializer
     new_in_220 = True
+    always_allow_superuser = False
 
     @csrf_exempt
     @transaction.non_atomic_requests
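This attribute pairs with the getattr() check added to ModelAccessPermission above: views keep the superuser bypass by default, and a view opts out by declaring the attribute, as AdHocCommandList does here. A minimal sketch of the interplay, using simplified stand-ins rather than the DRF classes:

    class View(object):
        pass  # no attribute: superusers bypass permission checks

    class AdHocView(object):
        always_allow_superuser = False

    def superuser_allowed(view):
        return getattr(view, 'always_allow_superuser', True)

    print(superuser_allowed(View()))       # True
    print(superuser_allowed(AdHocView()))  # False, normal checks apply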
@@ -2,6 +2,7 @@
 # All Rights Reserved.
 
 # Python
+import os
 import sys
 import logging
 
@@ -147,7 +148,7 @@ class BaseAccess(object):
     def check_license(self, add_host=False):
         reader = TaskSerializer()
         validation_info = reader.from_file()
-        if 'test' in sys.argv or 'jenkins' in sys.argv:
+        if ('test' in sys.argv or 'jenkins' in sys.argv) and not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''):
            validation_info['free_instances'] = 99999999
            validation_info['time_remaining'] = 99999999
            validation_info['grace_period_remaining'] = 99999999
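The new environment-variable guard lets license-related tests opt back into real validation: under `test`/`jenkins` runs the license data is normally faked as unlimited, unless SKIP_LICENSE_FIXUP_FOR_TEST is set, which the create_expired_license_file() helper added later in this commit does. A standalone sketch of the guard:

    import os
    import sys

    def fixup(validation_info):
        if ('test' in sys.argv or 'jenkins' in sys.argv) and \
                not os.environ.get('SKIP_LICENSE_FIXUP_FOR_TEST', ''):
            validation_info['free_instances'] = 99999999
            validation_info['time_remaining'] = 99999999
            validation_info['grace_period_remaining'] = 99999999
        return validation_info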
@@ -882,9 +882,8 @@ class Command(NoArgsCommand):
                 continue
             mem_group = self.all_group.all_groups[group_name]
             group = self.inventory.groups.create(name=group_name, variables=json.dumps(mem_group.variables), description='imported')
-            # Access auto one-to-one attribute to create related object.
-            #group.inventory_source
-            InventorySource.objects.create(group=group, inventory=self.inventory, name=('%s (%s)' % (group_name, self.inventory.name)))
+            # Create related inventory source (name will be set by save() method on InventorySource).
+            InventorySource.objects.create(group=group, inventory=self.inventory)
             self.logger.info('Group "%s" added', group.name)
             if inv_src_group and group_name in root_group_names:
                 self._batch_add_m2m(inv_src_group.children, group)
@@ -192,6 +192,8 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique):
         host = self.host or ''
         if not host and self.kind == 'vmware':
             raise ValidationError('Host required for VMware credential.')
+        if not host and self.kind == 'openstack':
+            raise ValidationError('Host required for OpenStack credential.')
         return host
 
     def clean_username(self):
@@ -203,6 +205,8 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique):
                                   'credential.')
         if not username and self.kind == 'vmware':
             raise ValidationError('Username required for VMware credential.')
+        if not username and self.kind == 'openstack':
+            raise ValidationError('Username required for OpenStack credential.')
         return username
 
     def clean_password(self):
@@ -213,6 +217,8 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique):
             raise ValidationError('API key required for Rackspace credential.')
         if not password and self.kind == 'vmware':
             raise ValidationError('Password required for VMware credential.')
+        if not password and self.kind == 'openstack':
+            raise ValidationError('Password or API key required for OpenStack credential.')
         return password
 
     def clean_project(self):
@@ -1110,12 +1110,14 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions):
                 self.inventory = self.group.inventory
                 if 'inventory' not in update_fields:
                     update_fields.append('inventory')
-        # Set name automatically.
+        # Set name automatically. Include PK (or placeholder) to make sure the names are always unique.
+        replace_text = '__replace_%s__' % now()
         old_name_re = re.compile(r'^inventory_source \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.*?$')
         if not self.name or old_name_re.match(self.name):
-            if self.inventory and self.group:
-                self.name = '%s (%s)' % (self.group.name, self.inventory.name)
+            if self.inventory and self.group and self.pk:
+                self.name = '%s (%s - %s)' % (self.group.name, self.inventory.name, self.pk)
+            elif self.inventory and self.group:
+                self.name = '%s (%s - %s)' % (self.group.name, self.inventory.name, replace_text)
             elif self.inventory and self.pk:
                 self.name = '%s (%s)' % (self.inventory.name, self.pk)
             elif self.inventory:
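An illustration of the naming rule above (a standalone sketch, not AWX code): a row that has no primary key yet gets a unique timestamp placeholder in its name, so two unsaved sources for the same group and inventory cannot collide; once the PK exists the name embeds it instead.

    from datetime import datetime

    def source_name(group, inventory, pk=None):
        suffix = pk if pk is not None else '__replace_%s__' % datetime.now()
        return '%s (%s - %s)' % (group, inventory, suffix)

    print(source_name('webservers', 'prod'))      # placeholder form, unique per call
    print(source_name('webservers', 'prod', 42))  # webservers (prod - 42)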
@@ -1247,6 +1249,19 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions):
     def task_impact(self):
         return 50
 
-    # InventoryUpdate credential required
+    # Custom InventoryUpdate credential not required
+    @property
+    def can_start(self):
+        if not super(InventoryUpdate, self).can_start:
+            return False
+
+        if (self.source != 'custom'
+                and not (self.credential and self.credential.active)):
+            return False
+        return True
+
 
 class CustomInventoryScript(CommonModelNameNotUnique):
 
     class Meta:
@@ -491,6 +491,17 @@ class Job(UnifiedJob, JobOptions):
                 presets[kw] = getattr(self, kw)
         return self.job_template.create_unified_job(**presets)
 
+    # Job Credential required
+    @property
+    def can_start(self):
+        if not super(Job, self).can_start:
+            return False
+
+        if not (self.credential and self.credential.active):
+            return False
+
+        return True
+
 
 class JobHostSummary(CreatedModifiedModel):
     '''
     Per-host statistics for each job.
@@ -610,11 +610,15 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         """Return a file-like object containing the standard out of the
         job's result.
         """
+        msg = {
+            'pending': 'Waiting for results...',
+            'missing': 'stdout capture is missing',
+        }
         if self.result_stdout_text:
             return StringIO(self.result_stdout_text)
         else:
             if not os.path.exists(self.result_stdout_file):
-                return StringIO("stdout capture is missing")
+                return StringIO(msg['missing' if self.finished else 'pending'])
 
             # There is a potential timing issue here, because another
             # process may be deleting the stdout file after it is written
@@ -631,7 +635,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                     self.result_stdout_text = type(self).objects.get(id=self.id).result_stdout_text
                     return self.result_stdout_raw_handle(attempt=attempt + 1)
                 else:
-                    return StringIO("stdout capture is missing")
+                    return StringIO(msg['missing' if self.finished else 'pending'])
 
     def _escape_ascii(self, content):
         ansi_escape = re.compile(r'\x1b[^m]*m')
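A standalone sketch of the fallback introduced above: the placeholder text now depends on whether the job has finished, instead of always reporting a missing capture.

    from StringIO import StringIO  # Python 2, matching this codebase

    msg = {
        'pending': 'Waiting for results...',
        'missing': 'stdout capture is missing',
    }

    def fallback(finished):
        return StringIO(msg['missing' if finished else 'pending'])

    print(fallback(finished=None).read())  # Waiting for results...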
@@ -17,3 +17,4 @@ from awx.main.tests.redact import * # noqa
 from awx.main.tests.views import * # noqa
 from awx.main.tests.commands import * # noqa
 from awx.main.tests.fact import * # noqa
+from awx.main.tests.unified_jobs import * # noqa
@@ -6,6 +6,7 @@ import glob
 import os
 import subprocess
 import tempfile
+import mock
 
 # Django
 from django.conf import settings
@@ -15,36 +16,37 @@ from django.core.urlresolvers import reverse
 from crum import impersonate
 
 # AWX
 from awx.main.utils import * # noqa
 from awx.main.models import * # noqa
 from awx.main.tests.base import BaseJobExecutionTest
 from awx.main.tests.tasks import TEST_SSH_KEY_DATA, TEST_SSH_KEY_DATA_LOCKED, TEST_SSH_KEY_DATA_UNLOCK
 
 __all__ = ['RunAdHocCommandTest', 'AdHocCommandApiTest']
 
 
 class BaseAdHocCommandTest(BaseJobExecutionTest):
     '''
     Common initialization for testing ad hoc commands.
     '''
 
     def setUp(self):
-        super(BaseAdHocCommandTest, self).setUp()
-        self.setup_instances()
-        self.setup_users()
-        self.organization = self.make_organizations(self.super_django_user, 1)[0]
-        self.organization.admins.add(self.normal_django_user)
-        self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory')
-        self.host = self.inventory.hosts.create(name='host.example.com')
-        self.host2 = self.inventory.hosts.create(name='host2.example.com')
-        self.group = self.inventory.groups.create(name='test-group')
-        self.group2 = self.inventory.groups.create(name='test-group2')
-        self.group.hosts.add(self.host)
-        self.group2.hosts.add(self.host, self.host2)
-        self.inventory2 = self.organization.inventories.create(name='test-inventory2')
-        self.host3 = self.inventory2.hosts.create(name='host3.example.com')
-        self.credential = None
-        settings.INTERNAL_API_URL = self.live_server_url
-        settings.CALLBACK_CONSUMER_PORT = ''
+        with ignore_inventory_computed_fields():
+            super(BaseAdHocCommandTest, self).setUp()
+            self.setup_instances()
+            self.setup_users()
+            self.organization = self.make_organizations(self.super_django_user, 1)[0]
+            self.organization.admins.add(self.normal_django_user)
+            self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory')
+            self.host = self.inventory.hosts.create(name='host.example.com')
+            self.host2 = self.inventory.hosts.create(name='host2.example.com')
+            self.group = self.inventory.groups.create(name='test-group')
+            self.group2 = self.inventory.groups.create(name='test-group2')
+            self.group.hosts.add(self.host)
+            self.group2.hosts.add(self.host, self.host2)
+            self.inventory2 = self.organization.inventories.create(name='test-inventory2')
+            self.host3 = self.inventory2.hosts.create(name='host3.example.com')
+            self.credential = None
+            settings.INTERNAL_API_URL = self.live_server_url
+            settings.CALLBACK_CONSUMER_PORT = ''
 
     def create_test_credential(self, **kwargs):
         self.credential = self.make_credential(**kwargs)
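The decorators added throughout the tests below all follow one pattern: BaseTask.run_pexpect is patched so no real ansible process is spawned, and the mock's (status, return code) return value drives the job's final state. A self-contained illustration of the mechanism, with a dummy class standing in for the AWX task:

    import mock

    class BaseTask(object):
        def run_pexpect(self, *args, **kwargs):
            raise RuntimeError('would launch a real subprocess')

    with mock.patch.object(BaseTask, 'run_pexpect',
                           return_value=('successful', 0)):
        print(BaseTask().run_pexpect())  # ('successful', 0), no subprocess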
@@ -124,7 +126,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.check_job_result(ad_hoc_command, 'failed')
         self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable')
 
-    def test_cancel_ad_hoc_command(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0))
+    def test_cancel_ad_hoc_command(self, ignore):
         ad_hoc_command = self.create_test_ad_hoc_command()
         self.assertEqual(ad_hoc_command.status, 'new')
         self.assertFalse(ad_hoc_command.cancel_flag)
@@ -145,7 +148,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         # Unable to start ad hoc command again.
         self.assertFalse(ad_hoc_command.signal_start())
 
-    def test_ad_hoc_command_options(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_ad_hoc_command_options(self, ignore):
         ad_hoc_command = self.create_test_ad_hoc_command(forks=2, verbosity=2)
         self.assertEqual(ad_hoc_command.status, 'new')
         self.assertFalse(ad_hoc_command.passwords_needed_to_start)
@@ -191,7 +195,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.check_ad_hoc_command_events(ad_hoc_command3, 'ok', hosts=[])
         self.assertEqual(ad_hoc_command3.ad_hoc_command_events.count(), 0)
 
-    def test_ssh_username_and_password(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_ssh_username_and_password(self, ignore):
         self.create_test_credential(username='sshuser', password='sshpass')
         ad_hoc_command = self.create_test_ad_hoc_command()
         self.assertEqual(ad_hoc_command.status, 'new')
@@ -199,10 +204,11 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'successful')
-        self.assertTrue('"-u"' in ad_hoc_command.job_args)
-        self.assertTrue('"--ask-pass"' in ad_hoc_command.job_args)
+        self.assertIn('"-u"', ad_hoc_command.job_args)
+        self.assertIn('"--ask-pass"', ad_hoc_command.job_args)
 
-    def test_ssh_ask_password(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_ssh_ask_password(self, ignore):
         self.create_test_credential(password='ASK')
         ad_hoc_command = self.create_test_ad_hoc_command()
         self.assertEqual(ad_hoc_command.status, 'new')
@@ -212,9 +218,10 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start(ssh_password='sshpass'))
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'successful')
-        self.assertTrue('"--ask-pass"' in ad_hoc_command.job_args)
+        self.assertIn('"--ask-pass"', ad_hoc_command.job_args)
 
-    def test_sudo_username_and_password(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_sudo_username_and_password(self, ignore):
         self.create_test_credential(become_method="sudo",
                                     become_username='sudouser',
                                     become_password='sudopass')
@@ -223,15 +230,14 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertFalse(ad_hoc_command.passwords_needed_to_start)
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         # Job may fail if current user doesn't have password-less sudo
         # privileges, but we're mainly checking the command line arguments.
         self.check_job_result(ad_hoc_command, ('successful', 'failed'))
-        self.assertTrue('"--become-method"' in ad_hoc_command.job_args)
-        self.assertTrue('"--become-user"' in ad_hoc_command.job_args)
-        self.assertTrue('"--ask-become-pass"' in ad_hoc_command.job_args)
-        self.assertFalse('"--become"' in ad_hoc_command.job_args)
+        self.assertIn('"--become-method"', ad_hoc_command.job_args)
+        self.assertIn('"--become-user"', ad_hoc_command.job_args)
+        self.assertIn('"--ask-become-pass"', ad_hoc_command.job_args)
+        self.assertNotIn('"--become"', ad_hoc_command.job_args)
 
-    def test_sudo_ask_password(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_sudo_ask_password(self, ignore):
         self.create_test_credential(become_password='ASK')
         ad_hoc_command = self.create_test_ad_hoc_command()
         self.assertEqual(ad_hoc_command.status, 'new')
@@ -240,13 +246,13 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertFalse(ad_hoc_command.signal_start())
         self.assertTrue(ad_hoc_command.signal_start(become_password='sudopass'))
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         # Job may fail, but we're mainly checking the command line arguments.
         self.check_job_result(ad_hoc_command, ('successful', 'failed'))
-        self.assertTrue('"--ask-become-pass"' in ad_hoc_command.job_args)
-        self.assertFalse('"--become-user"' in ad_hoc_command.job_args)
-        self.assertFalse('"--become"' in ad_hoc_command.job_args)
+        self.assertIn('"--ask-become-pass"', ad_hoc_command.job_args)
+        self.assertNotIn('"--become-user"', ad_hoc_command.job_args)
+        self.assertNotIn('"--become"', ad_hoc_command.job_args)
 
-    def test_unlocked_ssh_key(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('successful', 0))
+    def test_unlocked_ssh_key(self, ignore):
         self.create_test_credential(ssh_key_data=TEST_SSH_KEY_DATA)
         ad_hoc_command = self.create_test_ad_hoc_command()
         self.assertEqual(ad_hoc_command.status, 'new')
@@ -254,8 +260,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'successful')
-        self.assertFalse('"--private-key=' in ad_hoc_command.job_args)
-        self.assertTrue('ssh-agent' in ad_hoc_command.job_args)
+        self.assertNotIn('"--private-key=', ad_hoc_command.job_args)
+        self.assertIn('ssh-agent', ad_hoc_command.job_args)
 
     def test_locked_ssh_key_with_password(self):
         self.create_test_credential(ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
@@ -266,8 +272,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'successful')
-        self.assertTrue('ssh-agent' in ad_hoc_command.job_args)
-        self.assertTrue('Bad passphrase' not in ad_hoc_command.result_stdout)
+        self.assertIn('ssh-agent', ad_hoc_command.job_args)
+        self.assertNotIn('Bad passphrase', ad_hoc_command.result_stdout)
 
     def test_locked_ssh_key_with_bad_password(self):
         self.create_test_credential(ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
@@ -278,8 +284,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'failed')
-        self.assertTrue('ssh-agent' in ad_hoc_command.job_args)
-        self.assertTrue('Bad passphrase' in ad_hoc_command.result_stdout)
+        self.assertIn('ssh-agent', ad_hoc_command.job_args)
+        self.assertIn('Bad passphrase', ad_hoc_command.result_stdout)
 
     def test_locked_ssh_key_ask_password(self):
         self.create_test_credential(ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
@@ -303,8 +309,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertTrue(ad_hoc_command.signal_start(ssh_key_unlock=TEST_SSH_KEY_DATA_UNLOCK))
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
         self.check_job_result(ad_hoc_command, 'successful')
-        self.assertTrue('ssh-agent' in ad_hoc_command.job_args)
-        self.assertTrue('Bad passphrase' not in ad_hoc_command.result_stdout)
+        self.assertIn('ssh-agent', ad_hoc_command.job_args)
+        self.assertNotIn('Bad passphrase', ad_hoc_command.result_stdout)
 
     def test_run_with_proot(self):
         # Only run test if proot is installed
@@ -348,7 +354,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.check_job_result(ad_hoc_command, 'successful')
         self.check_ad_hoc_command_events(ad_hoc_command, 'ok')
 
-    def test_run_with_proot_not_installed(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('failed', 0))
+    def test_run_with_proot_not_installed(self, ignore):
         # Enable proot for this test, specify invalid proot cmd.
         settings.AWX_PROOT_ENABLED = True
         settings.AWX_PROOT_CMD = 'PR00T'
@@ -360,6 +367,9 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.check_job_result(ad_hoc_command, 'error', expect_traceback=True)
 
 
+def run_pexpect_mock(self, *args, **kwargs):
+    return 'successful', 0
+
 class AdHocCommandApiTest(BaseAdHocCommandTest):
     '''
     Test API list/detail views for ad hoc commands.
@@ -385,7 +395,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
                 del data[k]
         return self.post(url, data, expect=expect)
 
-    def test_ad_hoc_command_list(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_ad_hoc_command_list(self, ignore):
         url = reverse('api:ad_hoc_command_list')
 
         # Retrieve the empty list of ad hoc commands.
@@ -557,8 +568,16 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command(become_enabled=True)
             self.assertEqual(response['become_enabled'], True)
 
+        # Try to run with expired license.
+        self.create_expired_license_file()
+        with self.current_user('admin'):
+            self.run_test_ad_hoc_command(expect=403)
+        with self.current_user('normal'):
+            self.run_test_ad_hoc_command(expect=403)
+
-    def test_ad_hoc_command_detail(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_ad_hoc_command_detail(self, ignore):
         with self.current_user('admin'):
             response1 = self.run_test_ad_hoc_command()
             response2 = self.run_test_ad_hoc_command()
@@ -622,7 +641,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.delete(url, expect=204)
         self.delete(url, expect=404)
 
-    def test_ad_hoc_command_cancel(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_ad_hoc_command_cancel(self, ignore):
         # Override setting so that ad hoc command isn't actually started.
         with self.settings(CELERY_UNIT_TEST=False):
             with self.current_user('admin'):
@@ -674,7 +694,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
             self.assertEqual(response['can_cancel'], False)
             self.post(url, {}, expect=405)
 
-    def test_ad_hoc_command_relaunch(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_ad_hoc_command_relaunch(self, ignore):
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
 
@@ -734,7 +755,16 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
             self.assertEqual(response['passwords_needed_to_start'], [])
             response = self.post(url, {}, expect=400)
 
+        # Try to relaunch with expired license.
+        with self.current_user('admin'):
+            response = self.run_test_ad_hoc_command(inventory=self.inventory2.pk)
+        self.create_expired_license_file()
+        with self.current_user('admin'):
+            self.post(response['related']['relaunch'], {}, expect=403)
+
     def test_ad_hoc_command_events_list(self):
+        # TODO: Create test events instead of relying on playbooks execution
+
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
             response = self.run_test_ad_hoc_command()
@@ -823,6 +853,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.delete(url, expect=401)
 
     def test_ad_hoc_command_event_detail(self):
+        # TODO: Mock pexpect. Create test events instead of relying on playbooks execution
+
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
 
@@ -877,7 +909,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.patch(url, {}, expect=401)
         self.delete(url, expect=401)
 
-    def test_ad_hoc_command_activity_stream(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_ad_hoc_command_activity_stream(self, ignore):
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
 
@@ -927,7 +960,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.patch(url, {}, expect=401)
         self.delete(url, expect=401)
 
-    def test_inventory_ad_hoc_commands_list(self):
+    @mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
+    def test_inventory_ad_hoc_commands_list(self, ignore):
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
             response = self.run_test_ad_hoc_command(inventory=self.inventory2.pk)
@@ -1029,7 +1063,16 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
             response = self.get(inventory_url, expect=200)
             self.assertTrue(response['can_run_ad_hoc_commands'])
 
+        # Try to run with expired license.
+        self.create_expired_license_file()
+        with self.current_user('admin'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+        with self.current_user('normal'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+
     def test_host_ad_hoc_commands_list(self):
+        # TODO: Figure out why this test needs pexpect
+
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
             response = self.run_test_ad_hoc_command(limit=self.host2.name)
@@ -1078,7 +1121,16 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.patch(url, {}, expect=401)
         self.delete(url, expect=401)
 
+        # Try to run with expired license.
+        self.create_expired_license_file()
+        with self.current_user('admin'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+        with self.current_user('normal'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+
     def test_group_ad_hoc_commands_list(self):
+        # TODO: Figure out why this test needs pexpect
+
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command() # self.host + self.host2
             response = self.run_test_ad_hoc_command(limit=self.group.name) # self.host
@@ -1132,7 +1184,16 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
         self.patch(url, {}, expect=401)
         self.delete(url, expect=401)
 
+        # Try to run with expired license.
+        self.create_expired_license_file()
+        with self.current_user('admin'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+        with self.current_user('normal'):
+            self.run_test_ad_hoc_command(url=url, expect=403)
+
     def test_host_ad_hoc_command_events_list(self):
+        # TODO: Mock run_pexpect. Create test events instead of relying on playbooks execution
+
         with self.current_user('admin'):
             response = self.run_test_ad_hoc_command()
 
@@ -14,6 +14,7 @@ import time
 from multiprocessing import Process
 from subprocess import Popen
 import re
+import mock
 
 # PyYAML
 import yaml
@@ -76,8 +77,14 @@ class QueueStartStopTestMixin(QueueTestMixin):
         super(QueueStartStopTestMixin, self).tearDown()
         self.terminate_queue()
 
+class MockCommonlySlowTestMixin(object):
+    def __init__(self, *args, **kwargs):
+        from awx.api import generics
+        mock.patch.object(generics, 'get_view_description', return_value=None).start()
+        super(MockCommonlySlowTestMixin, self).__init__(*args, **kwargs)
+
 ansible_version = get_ansible_version()
 
-class BaseTestMixin(QueueTestMixin):
+class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
     '''
     Mixin with shared code for use by all test cases.
     '''
@@ -179,6 +186,13 @@ class BaseTestMixin(QueueTestMixin):
         self._temp_paths.append(license_path)
         os.environ['AWX_LICENSE_FILE'] = license_path
 
+    def create_expired_license_file(self, instance_count=1000, grace_period=False):
+        license_date = time.time() - 1
+        if not grace_period:
+            license_date -= 2592000
+        self.create_test_license_file(instance_count, license_date)
+        os.environ['SKIP_LICENSE_FIXUP_FOR_TEST'] = '1'
+
     def assertElapsedLessThan(self, seconds):
         elapsed = time.time() - self._start_time
         self.assertTrue(elapsed < seconds, 'elapsed time of %0.3fs is greater than %0.3fs' % (elapsed, seconds))
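The constant in create_expired_license_file is the 30-day grace period expressed in seconds: subtracting it puts the license date beyond both expiry and grace, while subtracting only one second leaves the license expired but inside its grace period.

    print(30 * 24 * 60 * 60)  # 2592000 seconds = 30 days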
@@ -341,7 +341,7 @@ class CleanupJobsTest(BaseCommandMixin, BaseLiveServerTest):
         shutil.rmtree(self.test_project_path, True)
 
     def create_test_credential(self, **kwargs):
-        self.credential = self.make_credential(kwargs)
+        self.credential = self.make_credential(**kwargs)
         return self.credential
 
     def create_test_project(self, playbook_content):
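The one-character fix above matters: `make_credential(kwargs)` hands the whole dict over as a single positional argument, while `make_credential(**kwargs)` unpacks it into keyword arguments. A minimal demonstration:

    def make_credential(**fields):
        return fields

    kwargs = {'username': 'sshuser'}
    print(make_credential(**kwargs))  # {'username': 'sshuser'}
    # make_credential(kwargs) would raise TypeError: unexpected positional argument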
@@ -409,6 +409,7 @@ class CleanupJobsTest(BaseCommandMixin, BaseLiveServerTest):
         self.assertEqual(ad_hoc_commands_before, ad_hoc_commands_after)
 
         # Create and run job.
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -20,7 +20,7 @@ from django.utils.timezone import now
 from awx.main.models import * # noqa
 from awx.main.tests.base import BaseTest, BaseTransactionTest
 
-__all__ = ['InventoryTest', 'InventoryUpdatesTest']
+__all__ = ['InventoryTest', 'InventoryUpdatesTest', 'InventoryCredentialTest']
 
 TEST_SIMPLE_INVENTORY_SCRIPT = "#!/usr/bin/env python\nimport json\nprint json.dumps({'hosts': ['ahost-01', 'ahost-02', 'ahost-03', 'ahost-04']})"
 TEST_SIMPLE_INVENTORY_SCRIPT_WITHOUT_HASHBANG = "import json\nprint json.dumps({'hosts': ['ahost-01', 'ahost-02', 'ahost-03', 'ahost-04']})"
@@ -1762,6 +1762,22 @@ class InventoryUpdatesTest(BaseTransactionTest):
         self.assertTrue(self.group.children.get(name='images').children.filter(active=True).count())
         self.assertTrue('instances' in child_names)
         self.assertTrue(self.group.children.get(name='instances').children.filter(active=True).count())
+        # Sync again with overwrite set to False after renaming a group that
+        # was created by the sync. With overwrite false, the renamed group and
+        # the original group (created again by the sync) will both exist.
+        region_group = self.group.children.get(name='regions').children.all()[0]
+        region_group_original_name = region_group.name
+        region_group.name = region_group.name + '-renamed'
+        region_group.save(update_fields=['name'])
+        cache_path3 = tempfile.mkdtemp(prefix='awx_ec2_')
+        self._temp_paths.append(cache_path3)
+        inventory_source.source_vars = '---\n\ncache_path: %s\n' % cache_path3
+        inventory_source.overwrite = False
+        inventory_source.save()
+        self.check_inventory_source(inventory_source, initial=False, instance_id_group_ok=True)
+        child_names = self.group.children.filter(active=True).values_list('name', flat=True)
+        self.assertTrue(region_group_original_name in self.group.children.get(name='regions').children.values_list('name', flat=True))
+        self.assertTrue(region_group.name in self.group.children.get(name='regions').children.values_list('name', flat=True))
         return
         # Print out group/host tree for debugging.
         print
@@ -1978,3 +1994,35 @@ class InventoryUpdatesTest(BaseTransactionTest):
                                           project=api_project)
         inventory_source = self.update_inventory_source(self.group, source='openstack', credential=credential)
         self.check_inventory_source(inventory_source)
+
+
+class InventoryCredentialTest(BaseTest):
+    def setUp(self):
+        super(InventoryCredentialTest, self).setUp()
+        #self.start_redis()
+        self.setup_instances()
+        self.setup_users()
+
+        self.url = reverse('api:credential_list')
+
+    def test_openstack_create_ok(self):
+        data = {
+            'kind': 'openstack',
+            'name': 'Best credential ever',
+            'username': 'some_user',
+            'password': 'some_password',
+            'project': 'some_project',
+            'host': 'some_host',
+        }
+        self.post(self.url, data=data, expect=201, auth=self.get_super_credentials())
+
+    def test_openstack_create_fail_required_fields(self):
+        data = {
+            'kind': 'openstack',
+            'name': 'Best credential ever',
+        }
+        response = self.post(self.url, data=data, expect=400, auth=self.get_super_credentials())
+        self.assertIn('username', response)
+        self.assertIn('password', response)
+        self.assertIn('host', response)
+        self.assertIn('project', response)
@@ -754,6 +754,13 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
         self.assertEqual(job.hosts.count(), 1)
         self.assertEqual(job.hosts.all()[0], host)
 
+        # Run the callback job again with extra vars and verify their presence
+        data.update(dict(extra_vars=dict(key="value")))
+        result = self.post(url, data, expect=202, remote_addr=host_ip)
+        jobs_qs = job_template.jobs.filter(launch_type='callback').order_by('-pk')
+        job = jobs_qs[0]
+        self.assertTrue("key" in job.extra_vars)
+
         # GET as unauthenticated user will prompt for authentication.
         self.get(url, expect=401, remote_addr=host_ip)
 
@@ -797,9 +804,9 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
             if host_ip:
                 break
         self.assertTrue(host)
-        self.assertEqual(jobs_qs.count(), 1)
-        self.post(url, data, expect=202, remote_addr=host_ip)
-        self.assertEqual(jobs_qs.count(), 2)
+        self.assertEqual(jobs_qs.count(), 2)
+        self.post(url, data, expect=202, remote_addr=host_ip)
+        self.assertEqual(jobs_qs.count(), 3)
         job = jobs_qs[0]
         self.assertEqual(job.launch_type, 'callback')
         self.assertEqual(job.limit, host.name)
@@ -822,9 +829,9 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
             if host_ip:
                 break
         self.assertTrue(host)
-        self.assertEqual(jobs_qs.count(), 2)
-        self.post(url, data, expect=202, remote_addr=host_ip)
-        self.assertEqual(jobs_qs.count(), 3)
+        self.assertEqual(jobs_qs.count(), 3)
+        self.post(url, data, expect=202, remote_addr=host_ip)
+        self.assertEqual(jobs_qs.count(), 4)
         job = jobs_qs[0]
         self.assertEqual(job.launch_type, 'callback')
         self.assertEqual(job.limit, host.name)
@@ -836,9 +843,9 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
         host_qs = host_qs.filter(variables__icontains='ansible_ssh_host')
         host = host_qs[0]
         host_ip = host.variables_dict['ansible_ssh_host']
-        self.assertEqual(jobs_qs.count(), 3)
-        self.post(url, data, expect=202, remote_addr=host_ip)
-        self.assertEqual(jobs_qs.count(), 4)
+        self.assertEqual(jobs_qs.count(), 4)
+        self.post(url, data, expect=202, remote_addr=host_ip)
+        self.assertEqual(jobs_qs.count(), 5)
         job = jobs_qs[0]
         self.assertEqual(job.launch_type, 'callback')
         self.assertEqual(job.limit, host.name)
@@ -868,9 +875,9 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
                 host_ip = list(ips)[0]
                 break
         self.assertTrue(host)
-        self.assertEqual(jobs_qs.count(), 4)
-        self.post(url, data, expect=202, remote_addr=host_ip)
-        self.assertEqual(jobs_qs.count(), 5)
+        self.assertEqual(jobs_qs.count(), 5)
+        self.post(url, data, expect=202, remote_addr=host_ip)
+        self.assertEqual(jobs_qs.count(), 6)
         job = jobs_qs[0]
         self.assertEqual(job.launch_type, 'callback')
         self.assertEqual(job.limit, ':&'.join([job_template.limit, host.name]))
@@ -18,6 +18,7 @@ from django.utils.timezone import now
 from crum import impersonate
 
 # AWX
+from awx.main.utils import * # noqa
 from awx.main.models import * # noqa
 from awx.main.tests.base import BaseJobExecutionTest
 
@@ -345,22 +346,23 @@ class RunJobTest(BaseJobExecutionTest):
     '''
 
     def setUp(self):
-        super(RunJobTest, self).setUp()
-        self.test_project_path = None
-        self.setup_instances()
-        self.setup_users()
-        self.organization = self.make_organizations(self.super_django_user, 1)[0]
-        self.inventory = self.organization.inventories.create(name='test-inventory',
-                                                              description='description for test-inventory')
-        self.host = self.inventory.hosts.create(name='host.example.com')
-        self.group = self.inventory.groups.create(name='test-group')
-        self.group2 = self.inventory.groups.create(name='test-group2')
-        self.group.hosts.add(self.host)
-        self.group2.hosts.add(self.host)
-        self.project = None
-        self.credential = None
-        self.cloud_credential = None
-        settings.INTERNAL_API_URL = self.live_server_url
+        with ignore_inventory_computed_fields():
+            super(RunJobTest, self).setUp()
+            self.test_project_path = None
+            self.setup_instances()
+            self.setup_users()
+            self.organization = self.make_organizations(self.super_django_user, 1)[0]
+            self.inventory = self.organization.inventories.create(name='test-inventory',
+                                                                  description='description for test-inventory')
+            self.host = self.inventory.hosts.create(name='host.example.com')
+            self.group = self.inventory.groups.create(name='test-group')
+            self.group2 = self.inventory.groups.create(name='test-group2')
+            self.group.hosts.add(self.host)
+            self.group2.hosts.add(self.host)
+            self.project = None
+            self.credential = None
+            self.cloud_credential = None
+            settings.INTERNAL_API_URL = self.live_server_url
 
     def tearDown(self):
         super(RunJobTest, self).tearDown()
@@ -562,6 +564,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertEqual(qs.count(), 0)
 
     def test_run_job(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -590,6 +593,7 @@ class RunJobTest(BaseJobExecutionTest):
         return job
 
     def test_check_job(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template, job_type='check')
@@ -617,6 +621,7 @@ class RunJobTest(BaseJobExecutionTest):
         return job
 
     def test_run_job_that_fails(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK2)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -644,6 +649,7 @@ class RunJobTest(BaseJobExecutionTest):
         return job
 
     def test_run_job_with_ignore_errors(self):
+        self.create_test_credential()
         self.create_test_project(TEST_IGNORE_ERRORS_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -766,6 +772,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertEqual(self.host.last_job_host_summary, None)
 
     def test_check_job_where_task_would_fail(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK2)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template, job_type='check')
@@ -799,6 +806,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertTrue(job.cancel()) # No change from calling again.
 
     def test_cancel_job(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template()
         # Pass save=False just for the sake of test coverage.
@@ -824,6 +832,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertFalse(job.signal_start())
 
     def test_extra_job_options(self):
+        self.create_test_credential()
         self.create_test_project(TEST_EXTRA_VARS_PLAYBOOK)
         # Test with extra_vars containing misc whitespace.
         job_template = self.create_test_job_template(force_handlers=True,
@@ -856,6 +865,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.check_job_result(job3, 'successful')
 
     def test_lots_of_extra_vars(self):
+        self.create_test_credential()
         self.create_test_project(TEST_EXTRA_VARS_PLAYBOOK)
         extra_vars = json.dumps(dict(('var_%d' % x, x) for x in xrange(200)))
         job_template = self.create_test_job_template(extra_vars=extra_vars)
@@ -869,6 +879,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertTrue('"-e"' in job.job_args)
 
     def test_limit_option(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template(limit='bad.example.com')
         job = self.create_test_job(job_template=job_template)
@@ -893,6 +904,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertTrue('ssh-agent' in job.job_args)
 
     def test_tag_and_task_options(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK_WITH_TAGS)
         job_template = self.create_test_job_template(job_tags='runme',
                                                      skip_tags='skipme',
@@ -972,6 +984,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertFalse('"--become-method"' in job.job_args)
 
     def test_job_template_become_enabled(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template(become_enabled=True)
         job = self.create_test_job(job_template=job_template)
@@ -1127,6 +1140,7 @@ class RunJobTest(BaseJobExecutionTest):
                                     ssh_key_data=TEST_SSH_CERT_KEY)
         playbook = TEST_ENV_PLAYBOOK % {'env_var1': env_var1,
                                         'env_var2': env_var2}
+        self.create_test_credential()
         self.create_test_project(playbook)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1154,6 +1168,7 @@ class RunJobTest(BaseJobExecutionTest):
         self._test_cloud_credential_environment_variables('vmware')
 
     def test_run_async_job(self):
+        self.create_test_credential()
         self.create_test_project(TEST_ASYNC_OK_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1184,6 +1199,7 @@ class RunJobTest(BaseJobExecutionTest):
         # FIXME: We are not sure why proot needs to be disabled on this test
         # Maybe they are simply susceptible to timing and proot adds time
         settings.AWX_PROOT_ENABLED = False
+        self.create_test_credential()
         self.create_test_project(TEST_ASYNC_FAIL_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1214,6 +1230,7 @@ class RunJobTest(BaseJobExecutionTest):
         # FIXME: We are not sure why proot needs to be disabled on this test
         # Maybe they are simply susceptible to timing and proot adds time
         settings.AWX_PROOT_ENABLED = False
+        self.create_test_credential()
         self.create_test_project(TEST_ASYNC_TIMEOUT_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1242,6 +1259,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertEqual(job.processed_hosts.count(), 1)
 
     def test_run_async_job_fire_and_forget(self):
+        self.create_test_credential()
         self.create_test_project(TEST_ASYNC_NOWAIT_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1269,6 +1287,7 @@ class RunJobTest(BaseJobExecutionTest):
         self.assertEqual(job.processed_hosts.count(), 1)
 
     def test_run_job_with_roles(self):
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK_WITH_ROLES, TEST_ROLE_PLAYBOOKS)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
@@ -1299,6 +1318,7 @@ class RunJobTest(BaseJobExecutionTest):
         settings.AWX_PROOT_HIDE_PATHS = [os.path.join(settings.BASE_DIR, 'settings')]
         # Create another project alongside the one we're using to verify it
         # is hidden.
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         other_project_path = self.project.local_path
         # Create a temp directory that should not be visible to the playbook.
@@ -1334,6 +1354,7 @@ class RunJobTest(BaseJobExecutionTest):
         # Enable proot for this test, specify invalid proot cmd.
         settings.AWX_PROOT_ENABLED = True
         settings.AWX_PROOT_CMD = 'PR00T'
+        self.create_test_credential()
         self.create_test_project(TEST_PLAYBOOK)
         job_template = self.create_test_job_template()
         job = self.create_test_job(job_template=job_template)
awx/main/tests/unified_jobs.py: new file, 53 lines
@@ -0,0 +1,53 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+
+# Python
+import mock
+from StringIO import StringIO
+from django.utils.timezone import now
+
+# Django
+from django.test import SimpleTestCase
+
+# AWX
+from awx.main.models import * # noqa
+
+__all__ = ['UnifiedJobsUnitTest',]
+
+
+class UnifiedJobsUnitTest(SimpleTestCase):
+
+    # stdout file present
+    @mock.patch('os.path.exists', return_value=True)
+    @mock.patch('codecs.open', return_value='my_file_handler')
+    def test_result_stdout_raw_handle_file__found(self, exists, open):
+        unified_job = UnifiedJob()
+        unified_job.result_stdout_file = 'dummy'
+
+        result = unified_job.result_stdout_raw_handle()
+
+        self.assertEqual(result, 'my_file_handler')
+
+    # stdout file missing, job finished
+    @mock.patch('os.path.exists', return_value=False)
+    def test_result_stdout_raw_handle__missing(self, exists):
+        unified_job = UnifiedJob()
+        unified_job.result_stdout_file = 'dummy'
+        unified_job.finished = now()
+
+        result = unified_job.result_stdout_raw_handle()
+
+        self.assertIsInstance(result, StringIO)
+        self.assertEqual(result.read(), 'stdout capture is missing')
+
+    # stdout file missing, job not finished
+    @mock.patch('os.path.exists', return_value=False)
+    def test_result_stdout_raw_handle__pending(self, exists):
+        unified_job = UnifiedJob()
+        unified_job.result_stdout_file = 'dummy'
+        unified_job.finished = None
+
+        result = unified_job.result_stdout_raw_handle()
+
+        self.assertIsInstance(result, StringIO)
+        self.assertEqual(result.read(), 'Waiting for results...')
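One caution about the stacked @mock.patch decorators in the new file: decorators apply bottom-up, so the mock for the decorator nearest the function is passed first. The test above names its parameters (exists, open) in top-down order, which only works because it never inspects those mocks. A runnable demonstration of the ordering:

    import mock

    @mock.patch('os.path.exists', return_value=True)   # outer: second argument
    @mock.patch('os.path.isdir', return_value=False)   # inner: first argument
    def demo(isdir_mock, exists_mock):
        print((isdir_mock.return_value, exists_mock.return_value))  # (False, True)

    demo()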
@@ -36,11 +36,8 @@ import json
 import logging
 import os
 import pwd
-import sys
-import urllib
-import urlparse
 import time
 from contextlib import closing
 
 # Requests
 import requests
@@ -32,7 +32,6 @@
 import sys
 import time
 import datetime
 import json
-from copy import deepcopy
 from ansible import constants as C
 from ansible.cache.base import BaseCacheModule
@@ -4,10 +4,10 @@
 # This file is a utility script that is not part of the AWX or Ansible
 # packages. It does not import any code from either package, nor does its
 # license apply to Ansible or AWX.
-# 
+#
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are met:
-# 
+#
 # Redistributions of source code must retain the above copyright notice, this
 # list of conditions and the following disclaimer.
 #
@@ -61,7 +61,7 @@ class TokenAuth(requests.auth.AuthBase):
         return request
 
 
-class InventoryScript(object): 
+class InventoryScript(object):
 
     def __init__(self, **options):
         self.options = options
@@ -95,11 +95,11 @@ class InventoryScript(object):
     def run(self):
         try:
             self.base_url = self.options.get('base_url', '') or \
-                            os.getenv('REST_API_URL', '')
+                os.getenv('REST_API_URL', '')
             if not self.base_url:
                 raise ValueError('No REST API URL specified')
             self.auth_token = self.options.get('authtoken', '') or \
-                              os.getenv('REST_API_TOKEN', '')
+                os.getenv('REST_API_TOKEN', '')
             parts = urlparse.urlsplit(self.base_url)
             if not (parts.username and parts.password) and not self.auth_token:
                 raise ValueError('No username/password specified in REST API '
@@ -107,7 +107,7 @@ class InventoryScript(object):
             try:
                 # Command line argument takes precedence over environment
                 # variable.
-                self.inventory_id = int(self.options.get('inventory_id', 0) or \
+                self.inventory_id = int(self.options.get('inventory_id', 0) or
                                         os.getenv('INVENTORY_ID', 0))
             except ValueError:
                 raise ValueError('Inventory ID must be an integer')
@@ -84,7 +84,7 @@ class ServiceScanService(BaseService):
                 else:
                     pid = None
             else:
-                pid = None
+                pid = None # NOQA
             payload = {"name": service_name, "state": service_state, "goal": service_goal, "source": "upstart"}
             services.append(payload)
 
@@ -104,7 +104,7 @@ class ServiceScanService(BaseService):
                     service_state = "dead"
                 elif len(line_data) == 3:
                     service_name = line_data[0]
-                    service_pid = None
+                    service_pid = None # NOQA
                     service_state = "stopped"
                 else:
                     continue
@@ -3,9 +3,7 @@
 
 import os
 import sys
-import glob
 from datetime import timedelta
-import tempfile
 
 MONGO_DB = 'system_tracking'
 
@@ -119,13 +117,13 @@ ALLOWED_HOSTS = []
 # reverse proxy.
 REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
 
-TEMPLATE_CONTEXT_PROCESSORS += (
+TEMPLATE_CONTEXT_PROCESSORS += ( # NOQA
     'django.core.context_processors.request',
     'awx.ui.context_processors.settings',
     'awx.ui.context_processors.version',
 )
 
-MIDDLEWARE_CLASSES += (
+MIDDLEWARE_CLASSES += ( # NOQA
     'awx.main.middleware.HAMiddleware',
     'awx.main.middleware.ActivityStreamMiddleware',
     'crum.CurrentRequestUserMiddleware',
@@ -135,6 +133,13 @@ TEMPLATE_DIRS = (
     os.path.join(BASE_DIR, 'templates'),
 )
 
+TEMPLATE_LOADERS = (
+    ('django.template.loaders.cached.Loader', (
+        'django.template.loaders.filesystem.Loader',
+        'django.template.loaders.app_directories.Loader',
+    )),
+)
+
 ROOT_URLCONF = 'awx.urls'
 
 WSGI_APPLICATION = 'awx.wsgi.application'
@@ -247,7 +252,7 @@ EMAIL_USE_TLS = False
 # Use Django-Debug-Toolbar if installed.
 try:
     import debug_toolbar
-    INSTALLED_APPS += ('debug_toolbar',)
+    INSTALLED_APPS += (debug_toolbar.__name__,)
 except ImportError:
     pass
 
|
||||
# Use Django-devserver if installed.
|
||||
try:
|
||||
import devserver
|
||||
INSTALLED_APPS += ('devserver',)
|
||||
INSTALLED_APPS += (devserver.__name__,)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
@ -444,7 +449,7 @@ VMWARE_REGIONS_BLACKLIST = []
|
||||
|
||||
# Inventory variable name/values for determining whether a host is
|
||||
# active in vSphere.
|
||||
VMWARE_ENABLED_VAR = 'vmware_powerState'
|
||||
VMWARE_ENABLED_VAR = 'vmware_powerState'
|
||||
VMWARE_ENABLED_VALUE = 'poweredOn'
|
||||
|
||||
# Inventory variable name containing the unique instance ID.
|
||||
@ -609,7 +614,7 @@ LOGGING = {
|
||||
'class':'logging.handlers.RotatingFileHandler',
|
||||
'filters': ['require_debug_false'],
|
||||
'filename': os.path.join(LOG_ROOT, 'tower_warnings.log'),
|
||||
'maxBytes': 1024*1024*5, # 5 MB
|
||||
'maxBytes': 1024 * 1024 * 5, # 5 MB
|
||||
'backupCount': 5,
|
||||
'formatter':'simple',
|
||||
},
|
||||
@ -618,7 +623,7 @@ LOGGING = {
|
||||
'class':'logging.handlers.RotatingFileHandler',
|
||||
'filters': ['require_debug_false'],
|
||||
'filename': os.path.join(LOG_ROOT, 'callback_receiver.log'),
|
||||
'maxBytes': 1024*1024*5, # 5 MB
|
||||
'maxBytes': 1024 * 1024 * 5, # 5 MB
|
||||
'backupCount': 5,
|
||||
'formatter':'simple',
|
||||
},
|
||||
@@ -627,7 +632,7 @@ LOGGING = {
             'class':'logging.handlers.RotatingFileHandler',
             'filters': ['require_debug_false'],
             'filename': os.path.join(LOG_ROOT, 'socketio_service.log'),
-            'maxBytes': 1024*1024*5, # 5 MB
+            'maxBytes': 1024 * 1024 * 5, # 5 MB
             'backupCount': 5,
             'formatter':'simple',
         },
@@ -636,7 +641,7 @@ LOGGING = {
             'class':'logging.handlers.RotatingFileHandler',
             'filters': ['require_debug_false'],
             'filename': os.path.join(LOG_ROOT, 'task_system.log'),
-            'maxBytes': 1024*1024*5, # 5 MB
+            'maxBytes': 1024 * 1024 * 5, # 5 MB
             'backupCount': 5,
             'formatter':'simple',
         },
@@ -6,13 +6,12 @@
 # Python
 import sys
 import traceback
 import glob
 
 # Django Split Settings
 from split_settings.tools import optional, include
 
 # Load default settings.
-from defaults import *
+from defaults import * # NOQA
 
 MONGO_DB = 'system_tracking_dev'
 
@@ -31,27 +30,24 @@ AWX_PROOT_ENABLED = True
 # Use Django-Jenkins if installed. Only run tests for awx.main app.
 try:
     import django_jenkins
-    INSTALLED_APPS += ('django_jenkins',)
+    INSTALLED_APPS += (django_jenkins.__name__,)
     PROJECT_APPS = ('awx.main.tests', 'awx.api.tests', 'awx.fact.tests',)
 except ImportError:
     pass
 
 if 'django_jenkins' in INSTALLED_APPS:
     JENKINS_TASKS = (
-        'django_jenkins.tasks.run_pylint',
-        'django_jenkins.tasks.run_flake8',
+        # 'django_jenkins.tasks.run_pylint',
+        # 'django_jenkins.tasks.run_flake8',
         # The following are not needed when including run_flake8
         # 'django_jenkins.tasks.run_pep8',
         # 'django_jenkins.tasks.run_pyflakes',
         # The following are handled by various grunt tasks and no longer required
         # 'django_jenkins.tasks.run_jshint',
         # 'django_jenkins.tasks.run_csslint',
-        )
+    )
     PEP8_RCFILE = "setup.cfg"
     PYLINT_RCFILE = ".pylintrc"
     CSSLINT_CHECKED_FILES = glob.glob(os.path.join(BASE_DIR, 'ui/static/less/*.less'))
     JSHINT_CHECKED_FILES = [os.path.join(BASE_DIR, 'ui/static/js'),
                             os.path.join(BASE_DIR, 'ui/static/lib/ansible'),]
 
 # Much faster than the default
 # https://docs.djangoproject.com/en/1.6/topics/auth/passwords/#how-django-stores-passwords
@ -4,10 +4,10 @@
# Development settings for AWX project, but with DEBUG disabled

# Load default settings.
from defaults import *
from defaults import * # NOQA

# Load development settings.
from development import *
from development import * # NOQA

# Disable capturing DEBUG
DEBUG = False

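The file above is pure composition: star-import the defaults, star-import the development overrides, then flip a single flag. A runnable sketch of that layering, with dicts standing in for the settings modules (the names here are invented for the demo):

    defaults = {'DEBUG': False, 'TEMPLATE_DEBUG': False}
    development = {'DEBUG': True, 'TEMPLATE_DEBUG': True}

    settings = {}
    settings.update(defaults)      # from defaults import *
    settings.update(development)   # from development import *
    settings['DEBUG'] = False      # this file's one override

    # Everything else from development survives; only DEBUG is forced off.
    assert settings == {'DEBUG': False, 'TEMPLATE_DEBUG': True}
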
@ -12,7 +12,7 @@ import traceback
from split_settings.tools import optional, include

# Load default settings.
from defaults import *
from defaults import * # NOQA

DEBUG = False
TEMPLATE_DEBUG = DEBUG
@ -49,7 +49,7 @@ LOGGING['handlers']['tower_warnings'] = {
'class':'logging.handlers.RotatingFileHandler',
'filters': ['require_debug_false'],
'filename': '/var/log/tower/tower.log',
'maxBytes': 1024*1024*5, # 5 MB
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 5,
'formatter':'simple',
}
@ -60,7 +60,7 @@ LOGGING['handlers']['callback_receiver'] = {
'class':'logging.handlers.RotatingFileHandler',
'filters': ['require_debug_false'],
'filename': '/var/log/tower/callback_receiver.log',
'maxBytes': 1024*1024*5, # 5 MB
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 5,
'formatter':'simple',
}
@ -70,7 +70,7 @@ LOGGING['handlers']['socketio_service'] = {
'class':'logging.handlers.RotatingFileHandler',
'filters': ['require_debug_false'],
'filename': '/var/log/tower/socketio_service.log',
'maxBytes': 1024*1024*5, # 5 MB
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 5,
'formatter':'simple',
}
@ -80,7 +80,7 @@ LOGGING['handlers']['task_system'] = {
'class':'logging.handlers.RotatingFileHandler',
'filters': ['require_debug_false'],
'filename': '/var/log/tower/task_system.log',
'maxBytes': 1024*1024*5, # 5 MB
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 5,
'formatter':'simple',
}

@ -220,6 +220,18 @@ var tower = angular.module('Tower', [
resolve: {
features: ['FeaturesService', function(FeaturesService) {
return FeaturesService.get();
}],
jobEventsSocket: ['Socket', '$rootScope', function(Socket, $rootScope) {
if (!$rootScope.event_socket) {
$rootScope.event_socket = Socket({
scope: $rootScope,
endpoint: "job_events"
});
$rootScope.event_socket.init();
return true;
} else {
return true;
}
}]
}
}).
@ -231,6 +243,18 @@ var tower = angular.module('Tower', [
resolve: {
features: ['FeaturesService', function(FeaturesService) {
return FeaturesService.get();
}],
jobEventsSocket: ['Socket', '$rootScope', function(Socket, $rootScope) {
if (!$rootScope.event_socket) {
$rootScope.event_socket = Socket({
scope: $rootScope,
endpoint: "job_events"
});
$rootScope.event_socket.init();
return true;
} else {
return true;
}
}]
}
}).
@ -242,6 +266,18 @@ var tower = angular.module('Tower', [
resolve: {
features: ['FeaturesService', function(FeaturesService) {
return FeaturesService.get();
}],
adhocEventsSocket: ['Socket', '$rootScope', function(Socket, $rootScope) {
if (!$rootScope.adhoc_event_socket) {
$rootScope.adhoc_event_socket = Socket({
scope: $rootScope,
endpoint: "ad_hoc_command_events"
});
$rootScope.adhoc_event_socket.init();
return true;
} else {
return true;
}
}]
}
}).
@ -942,16 +978,50 @@ var tower = angular.module('Tower', [
// Listen for job changes and issue callbacks to initiate
// DOM updates
function openSocket() {
var schedule_socket;

sock = Socket({ scope: $rootScope, endpoint: "jobs" });
sock.init();
sock.on("status_changed", function(data) {
$log.debug('Job ' + data.unified_job_id + ' status changed to ' + data.status);
$rootScope.$emit('JobStatusChange', data);
$log.debug('Job ' + data.unified_job_id +
' status changed to ' + data.status +
' send to ' + $location.$$url);

// this acts as a router...it emits the proper
// value based on what URL the user is currently
// accessing.
if ($location.$$url === '/jobs') {
$rootScope.$emit('JobStatusChange-jobs', data);
} else if (/\/jobs\/(\d)+\/stdout/.test($location.$$url) ||
/\/ad_hoc_commands\/(\d)+/.test($location.$$url)) {
$log.debug("sending status to standard out");
$rootScope.$emit('JobStatusChange-jobStdout', data);
} else if (/\/jobs\/(\d)+/.test($location.$$url)) {
$rootScope.$emit('JobStatusChange-jobDetails', data);
} else if ($location.$$url === '/home') {
$rootScope.$emit('JobStatusChange-home', data);
} else if ($location.$$url === '/portal') {
$rootScope.$emit('JobStatusChange-portal', data);
} else if ($location.$$url === '/projects') {
$rootScope.$emit('JobStatusChange-projects', data);
} else if (/\/inventory\/(\d)+\/manage/.test($location.$$url)) {
$rootScope.$emit('JobStatusChange-inventory', data);
}
});
sock.on("summary_complete", function(data) {
$log.debug('Job summary_complete ' + data.unified_job_id);
$rootScope.$emit('JobSummaryComplete', data);
});

schedule_socket = Socket({
scope: $rootScope,
endpoint: "schedules"
});
schedule_socket.init();
schedule_socket.on("schedule_changed", function(data) {
$log.debug('Schedule ' + data.unified_job_id + ' status changed to ' + data.status);
$rootScope.$emit('ScheduleStatusChange', data);
});
}
openSocket();

@ -115,9 +115,9 @@ Home.$inject = ['$scope', '$compile', '$routeParams', '$rootScope', '$location',
* @description This controls the 'home/groups' page that is loaded from the dashboard
*
*/
export function HomeGroups($log, $scope, $filter, $compile, $location, $routeParams, LogViewer, HomeGroupList, GenerateList, ProcessErrors, LoadBreadCrumbs, ReturnToCaller, ClearScope,
export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $routeParams, LogViewer, HomeGroupList, GenerateList, ProcessErrors, LoadBreadCrumbs, ReturnToCaller, ClearScope,
GetBasePath, SearchInit, PaginateInit, FormatDate, GetHostsStatusMsg, GetSyncStatusMsg, ViewUpdateStatus, Stream, GroupsEdit, Wait,
Alert, Rest, Empty, InventoryUpdate, Find, GroupsCancelUpdate, Store, Socket) {
Alert, Rest, Empty, InventoryUpdate, Find, GroupsCancelUpdate, Store) {

ClearScope('htmlTemplate'); //Garbage collection. Don't leave behind any listeners/watchers from the prior
//scope.
@ -127,8 +127,7 @@ export function HomeGroups($log, $scope, $filter, $compile, $location, $routePar
defaultUrl = GetBasePath('groups'),
scope = $scope,
modal_scope = $scope.$new(),
opt, PreviousSearchParams,
io;
opt, PreviousSearchParams;

generator.inject(list, { mode: 'edit', scope: scope, breadCrumbs: true });

@ -296,10 +295,10 @@ export function HomeGroups($log, $scope, $filter, $compile, $location, $routePar

LoadBreadCrumbs();

io = Socket({ scope: $scope, endpoint: "jobs" });
io.init();
$log.debug('Watching for job updates: ');
io.on("status_changed", function(data) {
if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-home', function(e, data) {
var stat, group;
if (data.group_id) {
group = Find({ list: scope[list.name], key: 'id', val: data.group_id });
@ -539,7 +538,7 @@ export function HomeGroups($log, $scope, $filter, $compile, $location, $routePar

}

HomeGroups.$inject = ['$log', '$scope', '$filter', '$compile', '$location', '$routeParams', 'LogViewer', 'HomeGroupList', 'generateList', 'ProcessErrors', 'LoadBreadCrumbs', 'ReturnToCaller',
HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$routeParams', 'LogViewer', 'HomeGroupList', 'generateList', 'ProcessErrors', 'LoadBreadCrumbs', 'ReturnToCaller',
'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'GetHostsStatusMsg', 'GetSyncStatusMsg', 'ViewUpdateStatus',
'Stream', 'GroupsEdit', 'Wait', 'Alert', 'Rest', 'Empty', 'InventoryUpdate', 'Find', 'GroupsCancelUpdate', 'Store'
];

@ -424,44 +424,19 @@ export function InventoriesAdd($scope, $rootScope, $compile, $location, $log, $r

data = {};
for (fld in form.fields) {
if (fld !== 'variables') {
if (form.fields[fld].realName) {
data[form.fields[fld].realName] = $scope[fld];
} else {
data[fld] = $scope[fld];
}
if (form.fields[fld].realName) {
data[form.fields[fld].realName] = $scope[fld];
} else {
data[fld] = $scope[fld];
}
}

if ($scope.removeUpdateInventoryVariables) {
$scope.removeUpdateInventoryVariables();
}
$scope.removeUpdateInventoryVariables = $scope.$on('UpdateInventoryVariables', function(e, data) {
var inventory_id = data.id;
var vars_to_send = {"variables": json_data};
Rest.setUrl(data.related.variable_data);
Rest.put(vars_to_send)
.success(function () {
Wait('stop');
$location.path('/inventories/' + inventory_id + '/manage');
})
.error(function (data, status) {
ProcessErrors( $scope, data, status, null, { hdr: 'Error!',
msg: 'Failed to update inventory variables. PUT returned status: ' + status
});
});
});

Rest.setUrl(defaultUrl);
Rest.post(data)
.success(function (data) {
var inventory_id = data.id;
if ($scope.variables) {
$scope.$emit('UpdateInventoryVariables', data);
} else {
Wait('stop');
$location.path('/inventories/' + inventory_id + '/');
}
Wait('stop');
$location.path('/inventories/' + inventory_id + '/');
})
.error(function (data, status) {
ProcessErrors( $scope, data, status, form, { hdr: 'Error!',
@ -597,40 +572,18 @@ export function InventoriesEdit($scope, $rootScope, $compile, $location, $log, $

data = {};
for (fld in form.fields) {
if (fld !== 'variables') {
if (form.fields[fld].realName) {
data[form.fields[fld].realName] = $scope[fld];
} else {
data[fld] = $scope[fld];
}
if (form.fields[fld].realName) {
data[form.fields[fld].realName] = $scope[fld];
} else {
data[fld] = $scope[fld];
}
}

if ($scope.removeUpdateInventoryVariables) {
$scope.removeUpdateInventoryVariables();
}
$scope.removeUpdateInventoryVariables = $scope.$on('UpdateInventoryVariables', function(e, data) {
Rest.setUrl(data.related.variable_data);
Rest.put(json_data)
.success(function () {
Wait('stop');
$location.path('/inventories/');
})
.error(function (data, status) {
ProcessErrors($scope, data, status, form, { hdr: 'Error!',
msg: 'Failed to update inventory variables. PUT returned status: ' + status
});
});
});

Rest.setUrl(defaultUrl + inventory_id + '/');
Rest.put(data)
.success(function (data) {
if ($scope.variables) {
$scope.$emit('UpdateInventoryVariables', data);
} else {
$location.path('/inventories/');
}
.success(function () {
Wait('stop');
$location.path('/inventories/');
})
.error(function (data, status) {
ProcessErrors($scope, data, status, form, { hdr: 'Error!',
@ -849,12 +802,11 @@ export function InventoriesManage ($log, $scope, $rootScope, $location,
ViewUpdateStatus, GroupsDelete, Store, HostsEdit, HostsDelete,
EditInventoryProperties, ToggleHostEnabled, Stream, ShowJobSummary,
InventoryGroupsHelp, HelpDialog, ViewJob,
GroupsCopy, HostsCopy, Socket) {
GroupsCopy, HostsCopy) {

var PreviousSearchParams,
url,
hostScope = $scope.$new(),
io;
hostScope = $scope.$new();

ClearScope();

@ -1095,38 +1047,33 @@ export function InventoriesManage ($log, $scope, $rootScope, $location,
if ($scope.removeWatchUpdateStatus) {
$scope.removeWatchUpdateStatus();
}
$scope.removeWatchUpdateStatus = $scope.$on('WatchUpdateStatus', function() {
io = Socket({ scope: $scope, endpoint: "jobs" });
io.init();
$log.debug('Watching for job updates: ');
io.on("status_changed", function(data) {
var stat, group;
if (data.group_id) {
group = Find({ list: $scope.groups, key: 'id', val: data.group_id });
if (data.status === "failed" || data.status === "successful") {
if (data.group_id === $scope.selected_group_id || group) {
// job completed, refresh all groups
$log.debug('Update completed. Refreshing the tree.');
$scope.refreshGroups();
}
}
else if (group) {
// incremental update, just update
$log.debug('Status of group: ' + data.group_id + ' changed to: ' + data.status);
stat = GetSyncStatusMsg({
status: data.status,
has_inventory_sources: group.has_inventory_sources,
source: group.source
});
$log.debug('changing tooltip to: ' + stat.tooltip);
group.status = data.status;
group.status_class = stat['class'];
group.status_tooltip = stat.tooltip;
group.launch_tooltip = stat.launch_tip;
group.launch_class = stat.launch_class;
$scope.removeWatchUpdateStatus = $scope.$on('JobStatusChange-inventory', function(e, data) {
var stat, group;
if (data.group_id) {
group = Find({ list: $scope.groups, key: 'id', val: data.group_id });
if (data.status === "failed" || data.status === "successful") {
if (data.group_id === $scope.selected_group_id || group) {
// job completed, refresh all groups
$log.debug('Update completed. Refreshing the tree.');
$scope.refreshGroups();
}
}
});
else if (group) {
// incremental update, just update
$log.debug('Status of group: ' + data.group_id + ' changed to: ' + data.status);
stat = GetSyncStatusMsg({
status: data.status,
has_inventory_sources: group.has_inventory_sources,
source: group.source
});
$log.debug('changing tooltip to: ' + stat.tooltip);
group.status = data.status;
group.status_class = stat['class'];
group.status_tooltip = stat.tooltip;
group.launch_tooltip = stat.launch_tip;
group.launch_class = stat.launch_class;
}
}
});

// Load group on selection
@ -1453,5 +1400,5 @@ InventoriesManage.$inject = ['$log', '$scope', '$rootScope', '$location',
'GroupsDelete', 'Store', 'HostsEdit', 'HostsDelete',
'EditInventoryProperties', 'ToggleHostEnabled', 'Stream', 'ShowJobSummary',
'InventoryGroupsHelp', 'HelpDialog', 'ViewJob', 'GroupsCopy',
'HostsCopy', 'Socket'
'HostsCopy'
];

@ -18,7 +18,6 @@ export function JobDetailController ($location, $rootScope, $scope, $compile, $r
ClearScope();

var job_id = $routeParams.id,
event_socket,
scope = $scope,
api_complete = false,
refresh_count = 0,
@ -99,12 +98,7 @@ export function JobDetailController ($location, $rootScope, $scope, $compile, $r
"<p><i class=\"fa fa-circle unreachable-hosts-color\"></i> Unreachable</p>\n" +
"<p><i class=\"fa fa-circle failed-hosts-color\"></i> Failed</p>\n";
function openSocket() {
event_socket = Socket({
scope: scope,
endpoint: "job_events"
});
event_socket.init();
event_socket.on("job_events-" + job_id, function(data) {
$rootScope.event_socket.on("job_events-" + job_id, function(data) {
if (api_complete && data.id > lastEventId) {
scope.waiting = false;
data.event = data.event_name;
@ -117,12 +111,12 @@ export function JobDetailController ($location, $rootScope, $scope, $compi
if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange', function(e, data) {
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-jobDetails', function(e, data) {
// if we receive a status change event for the current job indicating the job
// is finished, stop event queue processing and reload
if (parseInt(data.unified_job_id, 10) === parseInt(job_id,10)) {
if (data.status === 'failed' || data.status === 'canceled' ||
data.status === 'error' || data.status === 'successful') {
data.status === 'error' || data.status === 'successful' || data.status === 'running') {
$scope.liveEventProcessing = false;
if ($rootScope.jobDetailInterval) {
window.clearInterval($rootScope.jobDetailInterval);

@ -11,7 +11,7 @@
*/


export function JobStdoutController ($location, $log, $rootScope, $scope, $compile, $routeParams, ClearScope, GetBasePath, Wait, Rest, ProcessErrors, Socket) {
export function JobStdoutController ($location, $log, $rootScope, $scope, $compile, $routeParams, ClearScope, GetBasePath, Wait, Rest, ProcessErrors) {

ClearScope();

@ -19,8 +19,6 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi
api_complete = false,
stdout_url,
current_range,
event_socket,
status_socket,
loaded_sections = [],
event_queue = 0,
auto_scroll_down=true, // programmatic scroll to bottom
@ -35,44 +33,50 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi


function openSockets() {
status_socket = Socket({
scope: $scope,
endpoint: "jobs"
});
status_socket.init();
status_socket.on("status_changed", function(data) {
if (parseInt(data.unified_job_id, 10) === parseInt(job_id,10) && $scope.job) {
$scope.job.status = data.status;
if (data.status === 'failed' || data.status === 'canceled' ||
data.status === 'error' || data.status === 'successful') {
if ($rootScope.jobStdOutInterval) {
window.clearInterval($rootScope.jobStdOutInterval);
}
if (live_event_processing) {
if (loaded_sections.length === 0) {
$scope.$emit('LoadStdout');
}
else {
getNextSection();
}
}
live_event_processing = false;
if (/\/jobs\/(\d)+\/stdout/.test($location.$$url)) {
$log.debug("socket watching on job_events-" + job_id);
$rootScope.event_socket.on("job_events-" + job_id, function() {
$log.debug("socket fired on job_events-" + job_id);
if (api_complete) {
event_queue++;
}
}
});
event_socket = Socket({
scope: $scope,
endpoint: "job_events"
});
event_socket.init();
event_socket.on("job_events-" + job_id, function() {
if (api_complete) {
event_queue++;
}
});
});
} else if (/\/ad_hoc_commands\/(\d)+/.test($location.$$url)) {
$log.debug("socket watching on ad_hoc_command_events-" + job_id);
$rootScope.adhoc_event_socket.on("ad_hoc_command_events-" + job_id, function() {
$log.debug("socket fired on ad_hoc_command_events-" + job_id);
if (api_complete) {
event_queue++;
}
});
}
}
openSockets();

if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-jobStdout', function(e, data) {
if (parseInt(data.unified_job_id, 10) === parseInt(job_id,10) && $scope.job) {
$scope.job.status = data.status;
if (data.status === 'failed' || data.status === 'canceled' ||
data.status === 'error' || data.status === 'successful') {
if ($rootScope.jobStdOutInterval) {
window.clearInterval($rootScope.jobStdOutInterval);
}
if (live_event_processing) {
if (loaded_sections.length === 0) {
$scope.$emit('LoadStdout');
}
else {
getNextSection();
}
}
live_event_processing = false;
}
}
});

$rootScope.jobStdOutInterval = setInterval( function() {
if (event_queue > 0) {
// events happened since the last check
@ -260,7 +264,11 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi
Rest.setUrl(url);
Rest.get()
.success( function(data) {
$('#pre-container-content').append(data.content);
if ($('#pre-container-content').html() === "stdout capture pending") {
$('#pre-container-content').html(data.content);
} else {
$('#pre-container-content').append(data.content);
}
loaded_sections.push({
start: (data.range.start < 0) ? 0 : data.range.start,
end: data.range.end
@ -283,5 +291,4 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi

}

JobStdoutController.$inject = [ '$location', '$log', '$rootScope', '$scope', '$compile', '$routeParams', 'ClearScope', 'GetBasePath', 'Wait', 'Rest', 'ProcessErrors',
'Socket' ];
JobStdoutController.$inject = [ '$location', '$log', '$rootScope', '$scope', '$compile', '$routeParams', 'ClearScope', 'GetBasePath', 'Wait', 'Rest', 'ProcessErrors'];

@ -16,7 +16,7 @@

export function JobsListController ($rootScope, $log, $scope, $compile, $routeParams,
ClearScope, Breadcrumbs, LoadBreadCrumbs, LoadSchedulesScope,
LoadJobsScope, AllJobsList, ScheduledJobsList, GetChoices, GetBasePath, Wait, Socket) {
LoadJobsScope, AllJobsList, ScheduledJobsList, GetChoices, GetBasePath, Wait) {

ClearScope();

@ -24,35 +24,26 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $routePa
choicesCount = 0,
listCount = 0,
api_complete = false,
schedule_socket,
job_socket,
max_rows;

function openSockets() {
job_socket = Socket({
scope: $scope,
endpoint: "jobs"
});
job_socket.init();
job_socket.on("status_changed", function() {
// if (api_complete) {
jobs_scope.refreshJobs();
// }
});
schedule_socket = Socket({
scope: $scope,
endpoint: "schedules"
});
schedule_socket.init();
schedule_socket.on("schedule_changed", function() {
if (api_complete) {
scheduled_scope.search('schedule');
}
});
}

LoadBreadCrumbs();

if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-jobs', function() {
jobs_scope.refreshJobs();
});

if ($rootScope.removeScheduleStatusChange) {
$rootScope.removeScheduleStatusChange();
}
$rootScope.removeScheduleStatusChange = $rootScope.$on('ScheduleStatusChange', function() {
if (api_complete) {
scheduled_scope.search('schedule');
}
});

if ($scope.removeListLoaded) {
$scope.removeListLoaded();
}
@ -60,7 +51,6 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $routePa
listCount++;
if (listCount === 2) {
api_complete = true;
openSockets();
}
});

@ -193,4 +183,4 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $routePa

JobsListController.$inject = ['$rootScope', '$log', '$scope', '$compile', '$routeParams',
'ClearScope', 'Breadcrumbs', 'LoadBreadCrumbs', 'LoadSchedulesScope', 'LoadJobsScope',
'AllJobsList', 'ScheduledJobsList', 'GetChoices', 'GetBasePath', 'Wait', 'Socket'];
'AllJobsList', 'ScheduledJobsList', 'GetChoices', 'GetBasePath', 'Wait'];

@ -86,7 +86,7 @@ export function PortalController($scope, $compile, $routeParams, $rootScope, $lo
if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange', function() {
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-portal', function() {
jobs_scope.search('portal_job'); //processEvent(event);
});

@ -86,7 +86,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $routeParams,
if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange', function(e, data) {
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange-projects', function(e, data) {
var project;
$log.debug(data);
if ($scope.projects) {
@ -722,30 +722,6 @@ export function ProjectsEdit($scope, $rootScope, $compile, $location, $log, $rou
callback: 'choicesReady'
});

// Handle project update status changes
if ($rootScope.removeJobStatusChange) {
$rootScope.removeJobStatusChange();
}
$rootScope.removeJobStatusChange = $rootScope.$on('JobStatusChange', function(e, data) {
if ($scope.project_obj && data.project_id === $scope.project_obj.id) {
// This is the affected project
$log.debug('Received event for project: ' + $scope.project_obj.name);
$log.debug('Status changed to: ' + data.status);
// Set the status and re-evaluate the update button tooltip and class
$scope.project_obj.status = data.status;
$scope.scm_update_tooltip = "Start an SCM update";
$scope.scm_type_class = "";
if (data.status === 'running' || data.status === 'updating') {
$scope.scm_update_tooltip = "SCM update currently running";
$scope.scm_type_class = "btn-disabled";
}
if (Empty($scope.project_obj.scm_type)) {
$scope.scm_update_tooltip = 'Manual projects do not require an SCM update';
$scope.scm_type_class = "btn-disabled";
}
}
});

// Save changes to the parent
$scope.formSave = function () {
var fld, i, params;

@ -80,11 +80,6 @@ export function SocketsController ($scope, $compile, ClearScope, Socket) {
e.append(html);
$compile(e)(job_events_scope);

schedules_socket.init();
test_socket.init();
jobs_socket.init();
job_events_socket.init();

schedules_scope.url = schedules_socket.getUrl();
test_scope.url = test_socket.getUrl();
jobs_scope.url = jobs_socket.getUrl();

@ -28,7 +28,7 @@ export default
scope[iterator + '_num_pages'] = Math.ceil((count / scope[iterator + '_page_size']));
scope[iterator + '_num_pages'] = (scope[iterator + '_num_pages'] <= 0) ? 1 : scope[iterator + '_num_pages'];
scope[iterator + '_total_rows'] = count;

$('#pagination-links li:eq(1)').removeAttr('class');
// Which page are we on?
if (Empty(next) && previous) {
// no next page, but there is a previous page
@ -36,6 +36,7 @@
} else if (next && Empty(previous)) {
// next page available, but no previous page
scope[iterator + '_page'] = 1;
$('#pagination-links li:eq(1)').attr('class', 'disabled');
} else if (next && previous) {
// we're in between next and previous
scope[iterator + '_page'] = parseInt(previous.match(/page=\d+/)[0].replace(/page=/, '')) + 1;
@ -75,6 +76,9 @@
scope.getPage = function (page, set, iterator) {
var new_url = scope[iterator + '_url'].replace(/.page\=\d+/, ''),
connect = (/\/$/.test(new_url)) ? '?' : '&';
if(scope[iterator + '_page'] === 1 && page === 0){
return;
}
new_url += connect + 'page=' + page;
new_url += (scope[iterator + 'SearchParams']) ? '&' + scope[iterator + 'SearchParams'] +
'&page_size=' + scope[iterator + '_page_size'] : 'page_size=' + scope[iterator + 'PageSize'];
@ -136,6 +140,9 @@
scope.getPage = function (page, set, iterator) {
var new_url = scope[iterator + '_url'].replace(/.page\=\d+/, ''),
connect = (/\/$/.test(new_url)) ? '?' : '&';
if(scope[iterator + '_page'] === 1 && page === 0){
return;
}
new_url += connect + 'page=' + page;
new_url += (scope[iterator + 'SearchParams']) ? '&' + scope[iterator + 'SearchParams'] +
'&page_size=' + scope[iterator + '_page_size'] : '&page_size=' + scope[iterator + 'PageSize'];

@ -38,7 +38,7 @@ function JobStatusGraphData(Rest, getBasePath, processErrors, $rootScope, $q) {
destroyWatcher: angular.noop,
setupWatcher: function(period, jobType) {
this.destroyWatcher =
$rootScope.$on('JobStatusChange', function() {
$rootScope.$on('JobStatusChange-home', function() {
getData(period, jobType).then(function(result) {
$rootScope.
$broadcast('DataReceived:JobStatusGraph',

@ -1003,7 +1003,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
}
if ($AnsibleConfig.password_hasSymbol) {
html += "<div class=\"error\" ng-show=\"" + this.form.name + '_form.' + fld +
".$error.hasSymbol\">Your password must contain one of the following characters: `~!@#$%^&*()_-+=|}\]{\[;:\"\'?\/>.<,\n";
".$error.hasSymbol\">Your password must contain one of the following characters: `~!@#$%^&*()_-+=|}\]{\[;:\"\'?\/>.<,</div>\n";
}
}

@ -799,7 +799,7 @@ angular.module('GeneratorHelpers', [systemStatus.name])
html += "<li ng-hide=\"" + iterator + "_page -5 <= 1 \"><a href id=\"first-page-set\" ng-click=\"getPage(1,'" + set + "','" + iterator + "')\">" +
"<i class=\"fa fa-angle-double-left\"></i></a></li>\n";

html += "<li ng-hide=\"" + iterator + "_page -1 <= 0\"><a href " +
html += "<li><a href " +
"id=\"previous-page\" ng-click=\"getPage(" + iterator + "_page - 1,'" + set + "','" + iterator + "')\">" +
"<i class=\"fa fa-angle-left\"></i></a></li>\n";

@ -48,7 +48,7 @@ angular.module('DashboardJobsWidget', ['RestServices', 'Utilities'])
e.html(html);
$compile(e)(scope);

$rootScope.$on('JobStatusChange', function() {
$rootScope.$on('JobStatusChange-home', function() {
jobs_scope.refreshJobs();
});

@ -58,3 +58,9 @@
.ansi45 { background-color: #E850A8; }
.ansi46 { background-color: @skipped; }
.ansi47 { background-color: #F5F1DE; }

#pre-container-content > span {
display: inline-block;
white-space: pre-wrap;
word-wrap: normal;
}

@ -58,7 +58,7 @@ describeModule('DashboardGraphs')
$rootScope.$on('DataReceived:JobStatusGraph', function(e, data) {
result.resolve(data);
});
$rootScope.$emit('JobStatusChange');
$rootScope.$emit('JobStatusChange-home');
restStub.succeed({ data: expected });
restStub.flush();
}]);

@ -1 +1 @@
from awx.wsgi import application
from awx.wsgi import application # NOQA

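The `# NOQA` marker added above is a single-line suppression that flake8 honors; a module like awx/wsgi.py exists only to re-export a name, which would otherwise be flagged F401 (imported but unused) by the new `make check` target. A self-contained illustration, with a stdlib name standing in for awx.wsgi:

    # Imported purely as a re-export: never referenced below, so flake8
    # would report F401 here without the trailing marker.
    from os.path import join as application  # NOQA
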
@ -14,8 +14,8 @@
# W391 - Blank line at end of file
# W293 - Blank line contains whitespace
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293
exclude=awx/lib/site-packages,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data
exclude=.tox,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data

[flake8]
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293,E731
exclude=awx/lib/site-packages,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,node_modules/,awx/projects/
exclude=.tox,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker

@ -4,6 +4,33 @@
import sos
from distutils.version import LooseVersion

SOSREPORT_TOWER_COMMANDS = [
"ansible --version", # ansible core version
"tower-manage --version", # tower version
"supervisorctl status", # tower process status
"pip list", # pip package list
"tree -d /var/lib/awx", # show me the dirs
"ls -ll /var/lib/awx", # check permissions
"ls -ll /etc/tower",
]

SOSREPORT_TOWER_DIRS = [
"/etc/tower/",
"/var/log/tower",
"/var/log/httpd",
"/var/log/apache2",
"/var/log/redis",
"/var/log/supervisor",
"/var/log/syslog",
"/var/log/udev",
"/var/log/kern*",
"/var/log/dist-upgrade",
"/var/log/installer",
"/var/log/unattended-upgrades",
"/var/log/apport.log"
]


if LooseVersion(sos.__version__) >= LooseVersion('3.0'):
from sos.plugins import Plugin, RedHatPlugin, UbuntuPlugin

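The version gate reflects the sos plugin API split visible in the two branches below: sos >= 3.0 exposes add_copy_spec/add_cmd_output, while older releases use addCopySpec/collectExtOutput. A standalone sketch of the same branch:

    from distutils.version import LooseVersion

    def plugin_hooks(sos_version):
        """Pick the sos plugin API generation for a given version string."""
        if LooseVersion(sos_version) >= LooseVersion('3.0'):
            return ('add_copy_spec', 'add_cmd_output')
        return ('addCopySpec', 'collectExtOutput')

    assert plugin_hooks('3.1') == ('add_copy_spec', 'add_cmd_output')
    assert plugin_hooks('2.2') == ('addCopySpec', 'collectExtOutput')
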
@ -13,36 +40,10 @@ if LooseVersion(sos.__version__) >= LooseVersion('3.0'):

def setup(self):

commands = [
"ansible --version", # ansible core version
"awx-manage --version", # tower version
"supervisorctl status", # tower process status
"pip list", # pip package list
"tree -d /var/lib/awx", # show me the dirs
"ls -ll /var/lib/awx", # check permissions
"ls -ll /etc/tower"
]

dirs = [
"/etc/tower/",
"/var/log/tower",
"/var/log/httpd",
"/var/log/apache2",
"/var/log/redis",
"/var/log/supervisor",
"/var/log/syslog",
"/var/log/udev",
"/var/log/kern*",
"/var/log/dist-upgrade",
"/var/log/installer",
"/var/log/unattended-upgrades",
"/var/log/apport.log"
]

for path in dirs:
for path in SOSREPORT_TOWER_DIRS:
self.add_copy_spec(path)

for command in commands:
for command in SOSREPORT_TOWER_COMMANDS:
self.add_cmd_output(command)

else:
@ -53,35 +54,9 @@ else:

def setup(self):

commands = [
"ansible --version", # ansible core version
"awx-manage --version", # tower version
"supervisorctl status", # tower process status
"pip list", # pip package list
"tree -d /var/lib/awx", # show me the dirs
"ls -ll /var/lib/awx", # check permissions
"ls -ll /etc/tower"
]

dirs = [
"/etc/tower/",
"/var/log/tower",
"/var/log/httpd",
"/var/log/apache2",
"/var/log/redis",
"/var/log/supervisor",
"/var/log/syslog",
"/var/log/udev",
"/var/log/kern*",
"/var/log/dist-upgrade",
"/var/log/installer",
"/var/log/unattended-upgrades",
"/var/log/apport.log"
]

for path in dirs:
for path in SOSREPORT_TOWER_DIRS:
self.addCopySpec(path)

for command in commands:
for command in SOSREPORT_TOWER_COMMANDS:
self.collectExtOutput(command)

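One pitfall worth keeping in mind with command lists like SOSREPORT_TOWER_COMMANDS: Python concatenates adjacent string literals, so a comma dropped after an entry such as "pip list" silently fuses it with the next command rather than raising an error. A short demonstration:

    commands = [
        "pip list"               # no trailing comma here...
        "tree -d /var/lib/awx",  # ...so these two literals fuse into one entry
    ]
    assert commands == ["pip listtree -d /var/lib/awx"]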