Merge pull request #8559 from ryanpetrello/yet-anooooooother-downstream-merge

Merge in some downstream bug fixes

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
Authored by softwarefactory-project-zuul[bot] on 2020-11-06 19:29:18 +00:00; committed by GitHub
commit 7d3bf36227
12 changed files with 76 additions and 160 deletions

View File

@@ -453,7 +453,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
if 'capability_map' not in self.context:
if hasattr(self, 'polymorphic_base'):
model = self.polymorphic_base.Meta.model
prefetch_list = self.polymorphic_base._capabilities_prefetch
prefetch_list = self.polymorphic_base.capabilities_prefetch
else:
model = self.Meta.model
prefetch_list = self.capabilities_prefetch
@@ -640,12 +640,9 @@ class EmptySerializer(serializers.Serializer):
class UnifiedJobTemplateSerializer(BaseSerializer):
# As a base serializer, the capabilities prefetch is not used directly
_capabilities_prefetch = [
'admin', 'execute',
{'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use',
'organization.workflow_admin']}
]
# As a base serializer, the capabilities prefetch is not used directly,
# instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
capabilities_prefetch = []
class Meta:
model = UnifiedJobTemplate
@@ -695,7 +692,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
serializer.polymorphic_base = self
# capabilities prefetch is only valid for these models
if isinstance(obj, (JobTemplate, WorkflowJobTemplate)):
serializer.capabilities_prefetch = self._capabilities_prefetch
serializer.capabilities_prefetch = serializer_class.capabilities_prefetch
else:
serializer.capabilities_prefetch = None
return serializer.to_representation(obj)
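
Taken together, this hunk drops the private _capabilities_prefetch list from the polymorphic base and has the base pull the prefetch list from the concrete child serializer class instead. A minimal sketch of that delegation, using simplified stand-in classes (the real serializers carry many more fields, and the real to_representation resolves serializer_class from the object type):

# Sketch only: simplified stand-ins for the serializer classes touched above.
class BaseSerializer:
    # Concrete serializers override this with the role names to prefetch.
    capabilities_prefetch = []


class JobTemplateSerializer(BaseSerializer):
    capabilities_prefetch = ['admin', 'execute']  # illustrative values


class UnifiedJobTemplateSerializer(BaseSerializer):
    # A base serializer: no prefetch list of its own, the child class defines it.
    capabilities_prefetch = []

    def delegate_representation(self, obj, serializer_class):
        serializer = serializer_class()
        serializer.polymorphic_base = self
        # Pull the list from the child class rather than a private copy on the base.
        serializer.capabilities_prefetch = serializer_class.capabilities_prefetch
        return serializer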

View File

@@ -1,11 +1,7 @@
# Generated by Django 2.2.11 on 2020-05-01 13:25
from django.db import migrations, models
from awx.main.migrations._inventory_source import create_scm_script_substitute
def convert_cloudforms_to_scm(apps, schema_editor):
create_scm_script_substitute(apps, 'cloudforms')
from awx.main.migrations._inventory_source import delete_cloudforms_inv_source
class Migration(migrations.Migration):
@@ -15,7 +11,7 @@ class Migration(migrations.Migration):
]
operations = [
migrations.RunPython(convert_cloudforms_to_scm),
migrations.RunPython(delete_cloudforms_inv_source),
migrations.AlterField(
model_name='inventorysource',
name='source',
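
Read as a whole, the migration now runs a single data step that deletes CloudForms inventory sources rather than converting them to SCM sources. A sketch of the reshaped migration, assuming the dependency list and the AlterField field definition (not visible in this hunk) stay as they were:

# Sketch of the migration after this change; dependencies and the AlterField
# field kwargs are elided because the hunk does not show them.
from django.db import migrations

from awx.main.migrations._inventory_source import delete_cloudforms_inv_source


class Migration(migrations.Migration):

    dependencies = [
        # unchanged, not shown in the diff
    ]

    operations = [
        # Remove CloudForms inventory sources outright instead of converting
        # them to SCM sources backed by a generated project.
        migrations.RunPython(delete_cloudforms_inv_source),
        # ...followed by the AlterField on InventorySource.source shown above...
    ]

Note that RunPython without a reverse_code makes this data step irreversible in Django, which matches the one-way nature of the removal.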

View File

@@ -5,6 +5,7 @@ from uuid import uuid4
from django.utils.encoding import smart_text
from django.utils.timezone import now
from awx.main.utils.common import set_current_apps
from awx.main.utils.common import parse_yaml_or_json
logger = logging.getLogger('awx.main.migrations')
@@ -91,43 +92,14 @@ def back_out_new_instance_id(apps, source, new_id):
))
def create_scm_script_substitute(apps, source):
"""Only applies for cloudforms in practice, but written generally.
Given a source type, this will replace all inventory sources of that type
with SCM inventory sources that source the script from Ansible core
"""
# the revision in the Ansible 2.9 stable branch this project will start out as
# it can still be updated manually later (but staying within 2.9 branch), if desired
ansible_rev = '6f83b9aff42331e15c55a171de0a8b001208c18c'
def delete_cloudforms_inv_source(apps, schema_editor):
set_current_apps(apps)
InventorySource = apps.get_model('main', 'InventorySource')
ContentType = apps.get_model('contenttypes', 'ContentType')
Project = apps.get_model('main', 'Project')
if not InventorySource.objects.filter(source=source).exists():
logger.debug('No sources of type {} to migrate'.format(source))
return
proj_name = 'Replacement project for {} type sources - {}'.format(source, uuid4())
right_now = now()
project = Project.objects.create(
name=proj_name,
created=right_now,
modified=right_now,
description='Created by migration',
polymorphic_ctype=ContentType.objects.get(model='project'),
# project-specific fields
scm_type='git',
scm_url='https://github.com/ansible/ansible.git',
scm_branch='stable-2.9',
scm_revision=ansible_rev
)
ct = 0
for inv_src in InventorySource.objects.filter(source=source).iterator():
inv_src.source = 'scm'
inv_src.source_project = project
inv_src.source_path = 'contrib/inventory/{}.py'.format(source)
inv_src.scm_last_revision = ansible_rev
inv_src.save(update_fields=['source', 'source_project', 'source_path', 'scm_last_revision'])
logger.debug('Changed inventory source {} to scm type'.format(inv_src.pk))
ct += 1
InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
CredentialType = apps.get_model('main', 'CredentialType')
InventoryUpdate.objects.filter(inventory_source__source='cloudforms').delete()
InventorySource.objects.filter(source='cloudforms').delete()
ct = CredentialType.objects.filter(namespace='cloudforms').first()
if ct:
logger.info('Changed total of {} inventory sources from {} type to scm'.format(ct, source))
ct.credentials.all().delete()
ct.delete()
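
Because this hunk interleaves the deleted create_scm_script_substitute with the new helper, the added lines are easier to read assembled. A sketch of delete_cloudforms_inv_source as it appears to read after the change (whether the ContentType and Project lookups survive is not visible here, so they are left out):

def delete_cloudforms_inv_source(apps, schema_editor):
    # Sketch assembled from the added lines in this hunk.
    set_current_apps(apps)
    InventorySource = apps.get_model('main', 'InventorySource')
    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
    CredentialType = apps.get_model('main', 'CredentialType')

    # Drop the CloudForms inventory sources and their past updates.
    InventoryUpdate.objects.filter(inventory_source__source='cloudforms').delete()
    InventorySource.objects.filter(source='cloudforms').delete()

    # Remove the CloudForms credential type and any credentials created from it.
    ct = CredentialType.objects.filter(namespace='cloudforms').first()
    if ct:
        ct.credentials.all().delete()
        ct.delete()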

View File

@@ -881,33 +881,6 @@ ManagedCredentialType(
}
)
ManagedCredentialType(
namespace='cloudforms',
kind='cloud',
name=ugettext_noop('Red Hat CloudForms'),
managed_by_tower=True,
inputs={
'fields': [{
'id': 'host',
'label': ugettext_noop('CloudForms URL'),
'type': 'string',
'help_text': ugettext_noop('Enter the URL for the virtual machine that '
'corresponds to your CloudForms instance. '
'For example, https://cloudforms.example.org')
}, {
'id': 'username',
'label': ugettext_noop('Username'),
'type': 'string'
}, {
'id': 'password',
'label': ugettext_noop('Password'),
'type': 'string',
'secret': True,
}],
'required': ['host', 'username', 'password'],
}
)
ManagedCredentialType(
namespace='gce',
kind='cloud',
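
With the registration above deleted, CredentialType.defaults no longer offers a 'cloudforms' entry; rows that already exist in a database are handled by the data migration earlier in this commit. A quick check, sketched for a Django shell with AWX loaded:

# Sketch: verifying the removal from a Django shell with AWX loaded.
from awx.main.models import CredentialType

# The in-code registry no longer knows about CloudForms...
assert 'cloudforms' not in CredentialType.defaults

# ...and once the data migration has run, no row is left behind either.
assert not CredentialType.objects.filter(namespace='cloudforms').exists()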

View File

@@ -675,33 +675,6 @@ def test_net_create_ok(post, organization, admin):
assert cred.inputs['authorize'] is True
#
# Cloudforms Credentials
#
@pytest.mark.django_db
def test_cloudforms_create_ok(post, organization, admin):
params = {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
}
}
cloudforms = CredentialType.defaults['cloudforms']()
cloudforms.save()
params['organization'] = organization.id
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['host'] == 'some_host'
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
#
# GCE Credentials
#

View File

@@ -282,10 +282,6 @@ def test_prefetch_ujt_project_capabilities(alice, project, job_template, mocker)
list_serializer.child.to_representation(project)
assert 'capability_map' not in list_serializer.child.context
# Models for which the prefetch is valid for do
list_serializer.child.to_representation(job_template)
assert set(list_serializer.child.context['capability_map'][job_template.id].keys()) == set(('copy', 'edit', 'start'))
@pytest.mark.django_db
def test_prefetch_group_capabilities(group, rando):

View File

@@ -79,7 +79,6 @@ def test_default_cred_types():
'aws',
'azure_kv',
'azure_rm',
'cloudforms',
'conjur',
'galaxy_api_token',
'gce',

View File

@@ -5,7 +5,7 @@ from awx.main.migrations import _inventory_source as invsrc
from django.apps import apps
from awx.main.models import InventorySource
from awx.main.models import InventorySource, InventoryUpdate, ManagedCredentialType, CredentialType, Credential
@pytest.mark.parametrize('vars,id_var,result', [
@@ -42,16 +42,40 @@ def test_apply_new_instance_id(inventory_source):
@pytest.mark.django_db
def test_replacement_scm_sources(inventory):
inv_source = InventorySource.objects.create(
name='test',
inventory=inventory,
organization=inventory.organization,
source='ec2'
def test_cloudforms_inventory_removal(inventory):
ManagedCredentialType(
name='Red Hat CloudForms',
namespace='cloudforms',
kind='cloud',
managed_by_tower=True,
inputs={},
)
invsrc.create_scm_script_substitute(apps, 'ec2')
inv_source.refresh_from_db()
assert inv_source.source == 'scm'
assert inv_source.source_project
project = inv_source.source_project
assert 'Replacement project for' in project.name
CredentialType.defaults['cloudforms']().save()
cloudforms = CredentialType.objects.get(namespace='cloudforms')
Credential.objects.create(
name='test',
credential_type=cloudforms,
)
for source in ('ec2', 'cloudforms'):
i = InventorySource.objects.create(
name='test',
inventory=inventory,
organization=inventory.organization,
source=source,
)
InventoryUpdate.objects.create(
name='test update',
inventory_source=i,
source=source,
)
assert Credential.objects.count() == 1
assert InventorySource.objects.count() == 2 # ec2 + cf
assert InventoryUpdate.objects.count() == 2 # ec2 + cf
invsrc.delete_cloudforms_inv_source(apps, None)
assert InventorySource.objects.count() == 1 # ec2
assert InventoryUpdate.objects.count() == 1 # ec2
assert InventorySource.objects.first().source == 'ec2'
assert InventoryUpdate.objects.first().source == 'ec2'
assert Credential.objects.count() == 0
assert CredentialType.objects.filter(namespace='cloudforms').exists() is False

View File

@@ -29,6 +29,7 @@ function AddEditCredentialsController (
const isExternal = credentialType.get('kind') === 'external';
const mode = $state.current.name.startsWith('credentials.add') ? 'add' : 'edit';
vm.isEditable = credential.get('summary_fields.user_capabilities.edit');
vm.mode = mode;
vm.strings = strings;
@@ -52,6 +53,7 @@ function AddEditCredentialsController (
vm.form = credential.createFormSchema({ omit });
vm.form.disabled = !isEditable;
}
vm.form.disabled = !vm.isEditable;
vm.form._organization._disabled = !isOrgEditableByUser;
// Only exists for permissions compatibility

View File

@@ -29,30 +29,10 @@ export default ['$rootScope', 'Rest', 'GetBasePath', 'ProcessErrors', '$q', 'Con
}
};
if (config.analytics_status === 'detailed') {
this.setDetailed(options, config);
} else if (config.analytics_status === 'anonymous') {
this.setAnonymous(options);
}
return options;
},
// Detailed mode sends:
// VisitorId: userid+hash of license_key
// AccountId: hash of license_key from license
setDetailed: function(options, config) {
// config.deployment_id is a hash of the tower license_key
options.visitor.id = $rootScope.current_user.id + '@' + config.deployment_id;
options.account.id = config.deployment_id;
},
// Anonymous mode sends:
// VisitorId: <hardcoded id that is the same across all anonymous>
// AccountId: <hardcoded id that is the same across all anonymous>
setAnonymous: function (options) {
options.visitor.id = 0;
options.account.id = "tower.ansible.com";
return options;
},
setRole: function(options) {
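
The comments above describe the two analytics modes: detailed mode derives the visitor and account ids from the user id plus a hash of the license key, while anonymous mode sends fixed ids shared by every install. A language-agnostic sketch of that id scheme, written here in Python with made-up names (this is not the Angular service itself):

# Illustrative only: the id scheme described in the comments above.
ANONYMOUS_VISITOR_ID = 0
ANONYMOUS_ACCOUNT_ID = "tower.ansible.com"


def pendo_ids(analytics_status, user_id=None, deployment_id=None):
    """Return (visitor_id, account_id) for the given analytics mode.

    deployment_id stands in for config.deployment_id, a hash of the license key.
    """
    if analytics_status == 'detailed':
        return '{0}@{1}'.format(user_id, deployment_id), deployment_id
    if analytics_status == 'anonymous':
        return ANONYMOUS_VISITOR_ID, ANONYMOUS_ACCOUNT_ID
    return None, None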

View File

@@ -82,9 +82,11 @@ EXAMPLES = '''
- name: Export all tower assets
tower_export:
all: True
- name: Export all inventories
tower_export:
inventory: 'all'
- name: Export a job template named "My Template" and all Credentials
tower_export:
job_template: "My Template"
@@ -135,27 +137,27 @@ def main():
# Otherwise we take either the string or None (if the parameter was not passed) to get one or no items
export_args[resource] = module.params.get(resource)
# Currently the import process does not return anything on error
# It simply just logs to pythons logger
# Setup a log gobbler to get error messages from import_assets
# Currently the export process does not return anything on error
# It simply just logs to Python's logger
# Set up a log gobbler to get error messages from export_assets
log_capture_string = StringIO()
ch = logging.StreamHandler(log_capture_string)
for logger_name in ['awxkit.api.pages.api', 'awxkit.api.pages.page']:
logger = logging.getLogger(logger_name)
logger.setLevel(logging.WARNING)
ch.setLevel(logging.WARNING)
logger.setLevel(logging.ERROR)
ch.setLevel(logging.ERROR)
logger.addHandler(ch)
log_contents = ''
# Run the import process
# Run the export process
try:
module.json_output['assets'] = module.get_api_v2_object().export_assets(**export_args)
module.exit_json(**module.json_output)
except Exception as e:
module.fail_json(msg="Failed to export assets {0}".format(e))
finally:
# Finally consume the logs incase there were any errors and die if there were
# Finally, consume the logs in case there were any errors and die if there were
log_contents = log_capture_string.getvalue()
log_capture_string.close()
if log_contents != '':
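
The "log gobbler" the comments refer to is simply a StreamHandler writing into an in-memory buffer, attached to the awxkit loggers at ERROR level. A self-contained sketch of the pattern outside of an Ansible module (using io.StringIO directly rather than the six wrapper):

# Sketch of the log-capture ("log gobbler") pattern used above.
import logging
from io import StringIO

log_capture_string = StringIO()
ch = logging.StreamHandler(log_capture_string)
ch.setLevel(logging.ERROR)

for logger_name in ['awxkit.api.pages.api', 'awxkit.api.pages.page']:
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.ERROR)
    logger.addHandler(ch)

# ... call export_assets(**export_args) here ...

# Anything awxkit logged at ERROR level now sits in the buffer; treat it as failure.
log_contents = log_capture_string.getvalue()
log_capture_string.close()
if log_contents != '':
    raise RuntimeError('export logged errors: {0}'.format(log_contents))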

View File

@@ -38,7 +38,7 @@ EXAMPLES = '''
- name: Export all assets
tower_export:
all: True
registeR: export_output
register: export_output
- name: Import all tower assets from our export
tower_import:
@@ -51,7 +51,7 @@ EXAMPLES = '''
from ..module_utils.tower_awxkit import TowerAWXKitModule
# These two lines are not needed if awxkit changes to do progamatic notifications on issues
# These two lines are not needed if awxkit changes to do programatic notifications on issues
from ansible.module_utils.six.moves import StringIO
import logging
@@ -76,13 +76,15 @@ def main():
module.fail_json(msg="Your version of awxkit does not appear to have import/export")
# Currently the import process does not return anything on error
# It simply just logs to pythons logger
# Setup a log gobbler to get error messages from import_assets
# It simply just logs to Python's logger
# Set up a log gobbler to get error messages from import_assets
logger = logging.getLogger('awxkit.api.pages.api')
logger.setLevel(logging.WARNING)
logger.setLevel(logging.ERROR)
log_capture_string = StringIO()
ch = logging.StreamHandler(log_capture_string)
ch.setLevel(logging.WARNING)
ch.setLevel(logging.ERROR)
logger.addHandler(ch)
log_contents = ''
@@ -92,7 +94,7 @@ except Exception as e:
except Exception as e:
module.fail_json(msg="Failed to import assets {0}".format(e))
finally:
# Finally consume the logs incase there were any errors and die if there were
# Finally, consume the logs in case there were any errors and die if there were
log_contents = log_capture_string.getvalue()
log_capture_string.close()
if log_contents != '':
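
Both modules also raise the capture level from WARNING to ERROR, so routine awxkit warnings no longer land in the buffer and, given the log_contents != '' check, no longer fail the task; only errors do. A quick demonstration of that effect with a throwaway logger (the logger name is made up):

# Demonstrates the WARNING -> ERROR change: warnings are filtered out of the
# capture buffer, so only genuine errors can trip the "die if non-empty" check.
import logging
from io import StringIO

buf = StringIO()
handler = logging.StreamHandler(buf)
handler.setLevel(logging.ERROR)

demo = logging.getLogger('demo.level.change')  # made-up logger name
demo.setLevel(logging.ERROR)
demo.addHandler(handler)

demo.warning('harmless warning')    # dropped at the logger level
demo.error('something went wrong')  # captured

assert 'harmless warning' not in buf.getvalue()
assert 'something went wrong' in buf.getvalue()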