resolve conflict

sundeep-co-in 2016-10-27 08:13:02 +05:30
commit c1a5fb3312
18 changed files with 4939 additions and 33 deletions

View File

@@ -184,7 +184,7 @@ UI_RELEASE_FLAG_FILE = awx/ui/.release_built
deb deb-src debian debsign pbuilder reprepro setup_tarball \
virtualbox-ovf virtualbox-centos-7 virtualbox-centos-6 \
clean-bundle setup_bundle_tarball \
ui-docker-machine ui-docker ui-release \
ui-docker-machine ui-docker ui-release ui-devel \
ui-test ui-deps ui-test-ci ui-test-saucelabs jlaska
@@ -574,6 +574,10 @@ ui-docker-machine: $(UI_DEPS_FLAG_FILE)
ui-docker: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-docker-cid
# Builds UI with development/debug settings enabled. Does not start browser-sync or filesystem polling.
ui-devel: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-devel
ui-release: languages $(UI_RELEASE_FLAG_FILE)
# todo: include languages target when .po deliverables are added to source control

View File

@@ -2244,18 +2244,22 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer)
pass
class WorkflowNodeBaseSerializer(BaseSerializer):
job_type = serializers.SerializerMethodField()
job_tags = serializers.SerializerMethodField()
limit = serializers.SerializerMethodField()
skip_tags = serializers.SerializerMethodField()
job_type = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
fail_on_job_failure = serializers.BooleanField(
help_text=('If set to true, and if the job runs and fails, '
'the workflow is marked as failed.'),
default=True)
class Meta:
fields = ('*', '-name', '-description', 'id', 'url', 'related',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
'inventory', 'credential', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags')
'inventory', 'credential', 'job_type', 'job_tags', 'skip_tags', 'limit', 'fail_on_job_failure')
def get_related(self, obj):
res = super(WorkflowNodeBaseSerializer, self).get_related(obj)
@@ -2263,17 +2267,12 @@ class WorkflowNodeBaseSerializer(BaseSerializer):
res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
return res
def get_job_type(self, obj):
return obj.char_prompts.get('job_type', None)
def get_job_tags(self, obj):
return obj.char_prompts.get('job_tags', None)
def get_skip_tags(self, obj):
return obj.char_prompts.get('skip_tags', None)
def get_limit(self, obj):
return obj.char_prompts.get('limit', None)
def validate(self, attrs):
# char_prompts go through different validation, so remove them here
for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']:
if fd in attrs:
attrs.pop(fd)
return super(WorkflowNodeBaseSerializer, self).validate(attrs)
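For context, a minimal sketch of the pass-through pattern this hunk adopts: the prompt fields become writable serializer fields, but their values live in the node's char_prompts dict, which is validated separately; hence the pop in validate() above. The helper below is hypothetical, not the actual AWX code.

# Hypothetical sketch of the char_prompts pass-through.
CHAR_PROMPT_FIELDS = ['job_type', 'job_tags', 'skip_tags', 'limit']

def split_char_prompts(validated_data):
    """Pop prompt fields out of validated serializer data into char_prompts."""
    char_prompts = {}
    for field in CHAR_PROMPT_FIELDS:
        value = validated_data.pop(field, None)
        if value is not None:
            char_prompts[field] = value
    return char_prompts

# Example: split_char_prompts({'limit': 'webservers', 'inventory': 42})
# returns {'limit': 'webservers'} and leaves {'inventory': 42} untouched.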
class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):

View File

@@ -0,0 +1,14 @@
# Workflow Job Template Workflow Node List
Workflow nodes reference the templates to execute and define the order
in which to execute them. After a job in this workflow finishes,
the subsequent actions are to:
- run the nodes contained in "failure_nodes" or "always_nodes" if the job failed
- run the nodes contained in "success_nodes" or "always_nodes" if the job succeeded
The workflow job is marked as failed if any job run as part of the workflow
fails while `fail_on_job_failure` is set to true; otherwise, the workflow
job is marked as successful.
{% include "api/sub_list_create_api_view.md" %}
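To make the failure rule concrete, a small illustrative sketch in plain Python (hypothetical names, not the shipped implementation):

# A workflow fails when any node both ran a job that ended in
# 'failed' and kept fail_on_job_failure enabled (the default).
def workflow_status(nodes):
    for node in nodes:
        job = getattr(node, 'job', None)
        if job is not None and job.status == 'failed' and node.fail_on_job_failure:
            return 'failed'
    return 'successful'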

View File

@@ -2406,6 +2406,7 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta
serializer_class = LabelSerializer
parent_model = JobTemplate
relationship = 'labels'
new_in_300 = True
def post(self, request, *args, **kwargs):
# If a label already exists in the database, attach it instead of erroring out
@@ -2699,6 +2700,7 @@ class WorkflowJobTemplateList(ListCreateAPIView):
model = WorkflowJobTemplate
serializer_class = WorkflowJobTemplateListSerializer
always_allow_superuser = False
new_in_310 = True
# TODO: RBAC
'''
@@ -2716,10 +2718,12 @@ class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
model = WorkflowJobTemplate
serializer_class = WorkflowJobTemplateSerializer
always_allow_superuser = False
new_in_310 = True
class WorkflowJobTemplateLabelList(JobTemplateLabelList):
parent_model = WorkflowJobTemplate
new_in_310 = True
# TODO:
@@ -2727,6 +2731,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView):
model = WorkflowJobTemplate
serializer_class = EmptySerializer
new_in_310 = True
def get(self, request, *args, **kwargs):
data = {}
@@ -2752,6 +2757,12 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
parent_model = WorkflowJobTemplate
relationship = 'workflow_job_template_nodes'
parent_key = 'workflow_job_template'
new_in_310 = True
def update_raw_data(self, data):
for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']:
data[fd] = None
return super(WorkflowJobTemplateWorkflowNodesList, self).update_raw_data(data)
# TODO:
class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -2767,12 +2778,14 @@ class WorkflowJobList(ListCreateAPIView):
model = WorkflowJob
serializer_class = WorkflowJobListSerializer
new_in_310 = True
# TODO:
class WorkflowJobDetail(RetrieveDestroyAPIView):
model = WorkflowJob
serializer_class = WorkflowJobSerializer
new_in_310 = True
class WorkflowJobWorkflowNodesList(SubListAPIView):
@@ -2782,6 +2795,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
parent_model = WorkflowJob
relationship = 'workflow_job_nodes'
parent_key = 'workflow_job'
new_in_310 = True
class SystemJobTemplateList(ListAPIView):

View File

@@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0044_v310_project_playbook_files'),
]
operations = [
migrations.AddField(
model_name='workflowjobnode',
name='fail_on_job_failure',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='fail_on_job_failure',
field=models.BooleanField(default=True),
),
]

View File

@@ -60,6 +60,10 @@ class WorkflowNodeBase(CreatedModifiedModel):
default=None,
on_delete=models.SET_NULL,
)
fail_on_job_failure = models.BooleanField(
blank=True,
default=True,
)
# Prompting-related fields
inventory = models.ForeignKey(
'Inventory',
@@ -93,6 +97,22 @@ class WorkflowNodeBase(CreatedModifiedModel):
data[fd] = self.char_prompts[fd]
return data
@property
def job_type(self):
return self.char_prompts.get('job_type', None)
@property
def job_tags(self):
return self.char_prompts.get('job_tags', None)
@property
def skip_tags(self):
return self.char_prompts.get('skip_tags', None)
@property
def limit(self):
return self.char_prompts.get('limit', None)
def get_prompts_warnings(self):
ujt_obj = self.unified_job_template
if ujt_obj is None:
@@ -137,7 +157,7 @@ class WorkflowNodeBase(CreatedModifiedModel):
Return field names that should be copied from template node to job node.
'''
return ['workflow_job', 'unified_job_template',
'inventory', 'credential', 'char_prompts']
'inventory', 'credential', 'char_prompts', 'fail_on_job_failure']
class WorkflowJobTemplateNode(WorkflowNodeBase):
# TODO: Ensure the API forces workflow_job_template being set
@@ -383,6 +403,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow
from awx.main.tasks import RunWorkflowJob
return RunWorkflowJob
def _has_failed(self):
return self.workflow_job_nodes.filter(job__status='failed', fail_on_job_failure=True).exists()
def socketio_emit_data(self):
return {}

View File

@@ -73,10 +73,12 @@ def process_finished_workflow_jobs(workflow_jobs):
dag = WorkflowDAG(workflow_job)
if dag.is_workflow_done():
with transaction.atomic():
# TODO: detect if wfj failed
workflow_job.status = 'completed'
if workflow_job._has_failed():
workflow_job.status = 'failed'
else:
workflow_job.status = 'successful'
workflow_job.save()
workflow_job.websocket_emit_status('completed')
workflow_job.websocket_emit_status(workflow_job.status)
def rebuild_graph():
"""Regenerate the task graph by refreshing known tasks from Tower, purging

View File

@@ -42,7 +42,9 @@ class WorkflowDAG(SimpleDAG):
nodes.extend(children_all)
elif job.status in ['successful']:
children_success = self.get_dependencies(obj, 'success_nodes')
nodes.extend(children_success)
children_always = self.get_dependencies(obj, 'always_nodes')
children_all = children_success + children_always
nodes.extend(children_all)
return [n['node_object'] for n in nodes_found]
def is_workflow_done(self):
@@ -67,6 +69,8 @@ class WorkflowDAG(SimpleDAG):
nodes.extend(children_all)
elif job.status in ['successful']:
children_success = self.get_dependencies(obj, 'success_nodes')
nodes.extend(children_success)
children_always = self.get_dependencies(obj, 'always_nodes')
children_all = children_success + children_always
nodes.extend(children_all)
return True
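Both hunks apply the same child-selection rule; a compact sketch of it follows (hypothetical helper, mirroring the get_dependencies calls above):

def next_nodes(dag, node, job_status):
    # always_nodes children run regardless of the parent job's outcome.
    if job_status == 'failed':
        labels = ['failure_nodes', 'always_nodes']
    elif job_status == 'successful':
        labels = ['success_nodes', 'always_nodes']
    else:
        return []
    children = []
    for label in labels:
        children.extend(dag.get_dependencies(node, label))
    return children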

View File

@@ -90,3 +90,42 @@ class TestWorkflowJobTemplate:
assert len(parent_qs) == 1
assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[1]
@pytest.mark.django_db
class TestWorkflowJobFailure:
@pytest.fixture
def wfj(self):
return WorkflowJob.objects.create(name='test-wf-job')
def test_workflow_has_failed(self, wfj):
"""
Test that a single failed node with fail_on_job_failure = true
leads to the entire WF being marked as failed
"""
job = Job.objects.create(name='test-job', status='failed')
# Node has a failed job connected
WorkflowJobNode.objects.create(workflow_job=wfj, job=job)
assert wfj._has_failed()
def test_workflow_not_failed_unran_job(self, wfj):
"""
Test that a node whose job never ran will not mark the workflow job as failed
"""
WorkflowJobNode.objects.create(workflow_job=wfj)
assert not wfj._has_failed()
def test_workflow_not_failed_successful_job(self, wfj):
"""
Test that a successful node will not mark the workflow job as failed
"""
job = Job.objects.create(name='test-job', status='successful')
WorkflowJobNode.objects.create(workflow_job=wfj, job=job)
assert not wfj._has_failed()
def test_workflow_not_failed_failed_job_but_okay(self, wfj):
"""
Test that a failed node will not mark the workflow job as failed
when fail_on_job_failure is set to false
"""
job = Job.objects.create(name='test-job', status='failed')
WorkflowJobNode.objects.create(workflow_job=wfj, job=job, fail_on_job_failure=False)
assert not wfj._has_failed()

View File

@@ -7,12 +7,14 @@ from awx.main.models import Job
@pytest.fixture
def job(mocker):
return mocker.MagicMock(**{
ret = mocker.MagicMock(**{
'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
'extra_vars_dict': {"secret_key": "my_password"},
'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
'created_by.pk': 1, 'created_by.username': 'admin',
'launch_type': 'manual'})
ret.project = mocker.MagicMock(scm_revision='asdf1234')
return ret
@pytest.mark.survey
def test_job_survey_password_redaction():

View File

@@ -139,6 +139,7 @@ class TestWorkflowJobCreate:
char_prompts=wfjt_node_no_prompts.char_prompts,
inventory=None, credential=None,
unified_job_template=wfjt_node_no_prompts.unified_job_template,
fail_on_job_failure=True,
workflow_job=workflow_job_unit)
def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, mocker):
@@ -150,6 +151,7 @@
inventory=wfjt_node_with_prompts.inventory,
credential=wfjt_node_with_prompts.credential,
unified_job_template=wfjt_node_with_prompts.unified_job_template,
fail_on_job_failure=True,
workflow_job=workflow_job_unit)
@mock.patch('awx.main.models.workflow.WorkflowNodeBase.get_parent_nodes', lambda self: [])
@@ -215,7 +217,7 @@ class TestWorkflowWarnings:
def test_warn_scan_errors_node_prompts(self, job_node_with_prompts):
job_node_with_prompts.unified_job_template.job_type = 'scan'
job_node_with_prompts.job_type = 'run'
job_node_with_prompts.char_prompts['job_type'] = 'run'
job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23)
assert 'ignored' in job_node_with_prompts.get_prompts_warnings()
assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored']

View File

@@ -154,7 +154,25 @@ def workflow_dag_finished(factory_node):
expected = []
return (dag, expected, True)
@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2', 'workflow_dag_multiple_edges_labeled', 'workflow_dag_finished'])
@pytest.fixture
def workflow_dag_always(factory_node):
dag = WorkflowDAG()
data = [
factory_node(0, 'failed'),
factory_node(1, 'successful'),
factory_node(2, None),
]
[dag.add_node(d) for d in data]
dag.add_edge(data[0], data[1], 'always_nodes')
dag.add_edge(data[1], data[2], 'always_nodes')
expected = data[2:3]
return (dag, expected, False)
@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2',
'workflow_dag_multiple_edges_labeled', 'workflow_dag_finished',
'workflow_dag_always'])
def workflow_dag(request):
return request.getfuncargvalue(request.param)

View File

@@ -21,12 +21,21 @@ module.exports = function(grunt) {
// writes environment variables for development. currently manages:
// browser-sync + websocket proxy
grunt.registerTask('sync', [
'browserSync:http',
'concurrent:watch'
]);
grunt.registerTask('dev', [
'clean:tmp',
'clean:static',
'concurrent:dev',
'browserSync:http',
'concurrent:watch'
]);
grunt.registerTask('devNoSync', [
'clean:tmp',
'clean:static',
'concurrent:devNoSync',
]);
grunt.registerTask('release', [

View File

@@ -2,6 +2,10 @@ module.exports = {
dev: {
tasks: ['copy:vendor', 'copy:assets', 'copy:partials', 'copy:languages', 'copy:config', 'less:dev'],
},
// This concurrent target is intended for development UI builds that do not require starting browser-sync or filesystem polling
devNoSync: {
tasks: ['copy:vendor', 'copy:assets', 'copy:partials', 'copy:languages', 'copy:config', 'less:dev', 'webpack:dev'],
},
prod: {
tasks: ['newer:copy:vendor', 'newer:copy:assets', 'newer:copy:partials', 'newer:copy:languages', 'newer:copy:config', 'newer:less:prod']
},

awx/ui/npm-shrinkwrap.json (generated new file, 4738 additions)
File diff suppressed because it is too large.

View File

@@ -16,8 +16,9 @@
"npm": "^3.10.3"
},
"scripts": {
"build-docker-machine": "ip=$(docker-machine ip $DOCKER_MACHINE_NAME); npm set ansible-tower:django_host ${ip}; grunt dev",
"build-docker-cid": "ip=`docker inspect --format '{{ .NetworkSettings.IPAddress }}' $DOCkER_CID` | npm set config ansible-tower:django_host ${ip}; grunt dev",
"build-docker-machine": "grunt dev; ip=$(docker-machine ip $DOCKER_MACHINE_NAME); npm set ansible-tower:django_host ${ip}; grunt sync",
"build-docker-cid": "grunt dev; ip=`docker inspect --format '{{ .NetworkSettings.IPAddress }}' $DOCkER_CID` | npm set config ansible-tower:django_host ${ip}; grunt sync",
"build-devel": "grunt devNoSync",
"pot": "grunt nggettext_extract",
"languages": "grunt nggettext_compile",
"build-release": "grunt release",

View File

@@ -1,9 +1,9 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<config xmlns="http://zanata.org/namespace/config/">
<url>https://translate.stage.engineering.redhat.com/</url>
<project>ansible-tower</project>
<project-version>master</project-version>
<project-type>gettext</project-type>
<project>ansible-django</project>
<project-version>devel</project-version>
<project-type>podir</project-type>
<src-dir>awx/locale</src-dir>
<trans-dir>awx/locale</trans-dir>
<rules>

View File

@@ -33,6 +33,8 @@ from django.core.management import call_command
PROJECT_CONFIG = "config/zanata.xml"
MIN_TRANS_PERCENT_SETTING = False
MIN_TRANS_PERCENT = '10'
def _handle_response(output, errors):
@@ -59,7 +61,11 @@ def pull(lang=None, both=None):
"""
Pull translations .po from Zanata
"""
command = "zanata pull --project-config %(config)s --disable-ssl-cert"
if MIN_TRANS_PERCENT_SETTING:
command += " --min-doc-percent " + MIN_TRANS_PERCENT
if lang:
command += " --lang %s" % lang[0]
@@ -72,6 +78,9 @@
def push(lang=None, both=None):
"""
Push django.pot to Zanata
At Zanata:
(1) the project_type should be podir, i.e. the {locale}/{filename}.po layout
(2) only the required languages should be kept enabled
"""
p = Popen("zanata push --project-config %(config)s --disable-ssl-cert" %
{'config': PROJECT_CONFIG}, stdout=PIPE, stderr=PIPE, shell=True)
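To ground the podir note, a sketch of the layout implied by zanata.xml's src-dir/trans-dir settings and the {locale}/{filename}.po convention (the locale names below are examples, not the project's actual language list):

# Implied translation layout under awx/locale (illustrative locales):
#   awx/locale/fr/django.po
#   awx/locale/ja/django.po
#
# Resulting pull command once MIN_TRANS_PERCENT_SETTING is enabled:
#   zanata pull --project-config config/zanata.xml --disable-ssl-cert --min-doc-percent 10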