flake8 fixes

Author: Chris Meyers
Date: 2016-09-08 14:26:30 -04:00
parent 28ec68e91b
commit e4025a7eff
10 changed files with 60 additions and 74 deletions

View File

@@ -2187,7 +2187,7 @@ class WorkflowJobSerializer(UnifiedJobSerializer):
         res = super(WorkflowJobSerializer, self).get_related(obj)
         if obj.workflow_job_template:
             res['workflow_job_template'] = reverse('api:workflow_job_template_detail',
                                                    args=(obj.workflow_job_template.pk,))
         # TODO:
         #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
         res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,))
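Both sides of this hunk read the same here (presumably a whitespace-only flake8 re-alignment, which this view cannot show). For context, get_related() builds a dict of link names to URLs, each subclass layering onto super()'s result; a stripped-down sketch of the pattern (class names and path illustrative, not the AWX implementation):

    class BaseSerializerSketch(object):
        def get_related(self, obj):
            return {}

    class WorkflowJobSerializerSketch(BaseSerializerSketch):
        def get_related(self, obj):
            # Start from the parent's links, then add our own.
            res = super(WorkflowJobSerializerSketch, self).get_related(obj)
            res['workflow_nodes'] = '/api/v1/workflow_jobs/%d/workflow_nodes/' % obj.pk
            return res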

View File

@@ -11,7 +11,6 @@ import socket
 import sys
 import errno
 import logging
-import copy
 from base64 import b64encode
 from collections import OrderedDict

View File

@@ -1172,9 +1172,6 @@ class WorkflowJobTemplateAccess(BaseAccess):
     model = WorkflowJobTemplate

-    def can_start(self, obj):
-        return self.can_read(obj)
-
     def get_queryset(self):
         if self.user.is_superuser or self.user.is_system_auditor:
             qs = self.model.objects.all()
@@ -1234,7 +1231,9 @@ class WorkflowJobTemplateAccess(BaseAccess):
         if self.user.is_superuser:
             return True
-        return self.user in obj.execute_role
+        return self.can_read(obj)
+        # TODO: We should use execute role rather than read role
+        #return self.user in obj.execute_role

     def can_change(self, obj, data):
         data_for_change = data
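The commented-out line above records where this check should end up: start permission gated on the execute role rather than the read role. A sketch of that intended variant (our reading of the TODO, not what this commit ships):

    def can_start(self, obj):
        # Superusers can always start; everyone else needs execute-role
        # membership, which AWX role objects expose via `in` (as the
        # commented-out line implies).
        if self.user.is_superuser:
            return True
        return self.user in obj.execute_role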

View File

@@ -220,7 +220,7 @@ class WorkflowDAG(SimpleDAG):
             children_always = self.get_dependencies(obj, 'always_nodes')
             children_all = children_failed + children_always
             nodes.extend(children_all)
-        elif job.status in ['successfult']:
+        elif job.status in ['successful']:
             children_success = self.get_dependencies(obj, 'success_nodes')
             nodes.extend(children_success)
         else:
@@ -260,12 +260,22 @@ def do_spawn_workflow_jobs():
         dag = WorkflowDAG(workflow_job)
         spawn_nodes = dag.bfs_nodes_to_run()
         for spawn_node in spawn_nodes:
-            # TODO: Inject job template template params as kwargs
+            # TODO: Inject job template template params as kwargs.
+            # Make sure to take into account extra_vars merge logic
             kv = {}
             job = spawn_node.unified_job_template.create_unified_job(**kv)
             spawn_node.job = job
             spawn_node.save()
-            result = job.signal_start(**kv)
+            can_start = job.signal_start(**kv)
+            if not can_start:
+                job.status = 'failed'
+                job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials"
+                job.save(update_fields=['status', 'job_explanation'])
+                job.socketio_emit_status("failed")
+                # TODO: should we emit a status on the socket here similar to tasks.py tower_periodic_scheduler() ?
+                #emit_websocket_notification('/socket.io/jobs', '', dict(id=))

 def rebuild_graph(message):
     """Regenerate the task graph by refreshing known tasks from Tower, purging

View File

@@ -1,6 +1,9 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.

+# Python
+#import urlparse

 # Django
 from django.db import models
 from django.core.urlresolvers import reverse
@@ -202,8 +205,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow
     def get_absolute_url(self):
         return reverse('api:workflow_job_detail', args=(self.pk,))

-    def get_ui_url(self):
-        return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk))
+    # TODO: Ask UI if this is needed ?
+    #def get_ui_url(self):
+    #    return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk))

     def is_blocked_by(self, obj):
         return True
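For reference, the now-commented implementation leans on the Python 2 stdlib urlparse.urljoin (matching the `#import urlparse` stub added at the top of this file). Its behavior on these inputs, with an example host assumed:

    import urlparse  # Python 2; the Python 3 equivalent is urllib.parse

    # Joining a base URL with an absolute path-plus-fragment keeps the
    # scheme and host and replaces the rest:
    url = urlparse.urljoin('https://tower.example.com', '/#/workflow_jobs/42')
    assert url == 'https://tower.example.com/#/workflow_jobs/42'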

View File

@@ -14,6 +14,7 @@ from awx.main.models import (
     Inventory,
     Label,
     WorkflowJobTemplate,
+    WorkflowNode,
 )

 # mk methods should create only a single object of a single type.
@@ -169,7 +170,7 @@ def mk_workflow_node(workflow_job_template=None, unified_job_template=None,
                      success_nodes=None, failure_nodes=None, always_nodes=None,
                      job=None, persisted=True):
     workflow_node = WorkflowNode(workflow_job_template=workflow_job_template,
-                                 unified_job_template=job_template,
+                                 unified_job_template=unified_job_template,
                                  success_nodes=success_nodes,
                                  failure_nodes=failure_nodes,
                                  always_nodes=always_nodes,
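The one-word change above is a genuine bug fix, not style: the factory's parameter is `unified_job_template`, so the old keyword value `job_template` named a variable that does not exist in that scope. A contrived sketch of the failure mode (names hypothetical):

    def mk_node(unified_job_template=None):
        # Buggy form: `job_template` is undefined here, so any call raises
        # NameError (or silently binds a stray global of that name, wiring
        # the node to the wrong object).
        return dict(unified_job_template=job_template)

    # mk_node()  # -> NameError: name 'job_template' is not defined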

View File

@@ -9,7 +9,6 @@ from awx.main.models import (
     Inventory,
     Job,
     Label,
-    WorkflowJobTemplate,
 )

 from .objects import (

View File

@@ -2,7 +2,6 @@
 from awx.main.models import (
     WorkflowNode,
     WorkflowJobTemplate,
-    WorkflowJob,
 )

 from awx.main.models.jobs import JobTemplate

View File

@@ -2,7 +2,6 @@
 from awx.main.models import (
     WorkflowNode,
     WorkflowJobTemplate,
-    WorkflowJob,
 )

 from awx.main.models.jobs import JobTemplate
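These import hunks (together with the `import copy` removal earlier) are the classic flake8 cleanup: each dropped name triggers check F401, imported but unused. A minimal reproduction:

    from awx.main.models import WorkflowJob  # never referenced below

    # Running flake8 on this file reports:
    #   example.py:1:1: F401 'awx.main.models.WorkflowJob' imported but unused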

View File

@@ -10,12 +10,12 @@ import pytest
 def dag_root():
     dag = SimpleDAG()
     data = [
-        { 1: 1 },
-        { 2: 2 },
-        { 3: 3 },
-        { 4: 4 },
-        { 5: 5 },
-        { 6: 6 },
+        {1: 1},
+        {2: 2},
+        {3: 3},
+        {4: 4},
+        {5: 5},
+        {6: 6},
     ]
     # Add all the nodes to the DAG
     [dag.add_node(d) for d in data]
@@ -30,12 +30,12 @@ def dag_root():
 def dag_simple_edge_labels():
     dag = SimpleDAG()
     data = [
-        { 1: 1 },
-        { 2: 2 },
-        { 3: 3 },
-        { 4: 4 },
-        { 5: 5 },
-        { 6: 6 },
+        {1: 1},
+        {2: 2},
+        {3: 3},
+        {4: 4},
+        {5: 5},
+        {6: 6},
     ]
     # Add all the nodes to the DAG
     [dag.add_node(d) for d in data]
@@ -46,23 +46,18 @@ def dag_simple_edge_labels():
     return dag

+'''
 class TestSimpleDAG(object):
     def test_get_root_nodes(self, dag_root):
         leafs = dag_root.get_leaf_nodes()
-        for l in leafs:
-            print(l)
         roots = dag_root.get_root_nodes()
-        for n in roots:
-            print(n)

     def test_get_labeled_edges(self, dag_simple_edge_labels):
         dag = dag_simple_edge_labels
         nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one')
         nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two')
-        print("Matching nodes: ")
-        for n in nodes:
-            print(n)
+'''
@pytest.fixture @pytest.fixture
def factory_node(): def factory_node():
@@ -74,41 +69,22 @@ def factory_node():
         return wfn
     return fn

-@pytest.fixture
-def workflow_dag_multiple_roots(factory_node):
-    dag = WorkflowDAG()
-    data = [
-        factory_node(1, None),
-        factory_node(2, None),
-        factory_node(3, None),
-        factory_node(4, None),
-        factory_node(5, None),
-        factory_node(6, None),
-    ]
-    [dag.add_node(d) for d in data]
-    dag.add_edge(data[0], data[3], 'success')
-    dag.add_edge(data[1], data[4], 'success')
-    dag.add_edge(data[2], data[5], 'success')
-    return dag

 @pytest.fixture
 def workflow_dag_level_2(factory_node):
     dag = WorkflowDAG()
     data = [
-        factory_node(1, 'success'),
-        factory_node(2, 'success'),
-        factory_node(3, 'success'),
+        factory_node(0, 'successful'),
+        factory_node(1, 'successful'),
+        factory_node(2, 'successful'),
+        factory_node(3, None),
         factory_node(4, None),
         factory_node(5, None),
-        factory_node(6, None),
     ]
     [dag.add_node(d) for d in data]
-    dag.add_edge(data[0], data[3], 'success')
-    dag.add_edge(data[1], data[4], 'success')
-    dag.add_edge(data[2], data[5], 'success')
+    dag.add_edge(data[0], data[3], 'success_nodes')
+    dag.add_edge(data[1], data[4], 'success_nodes')
+    dag.add_edge(data[2], data[5], 'success_nodes')
     return (dag, data[3:6], False)
@@ -125,9 +101,9 @@ def workflow_dag_multiple_roots(factory_node):
     ]
     [dag.add_node(d) for d in data]
-    dag.add_edge(data[0], data[3], 'success')
-    dag.add_edge(data[1], data[4], 'success')
-    dag.add_edge(data[2], data[5], 'success')
+    dag.add_edge(data[0], data[3], 'success_nodes')
+    dag.add_edge(data[1], data[4], 'success_nodes')
+    dag.add_edge(data[2], data[5], 'success_nodes')
     expected = data[0:3]
     return (dag, expected, False)
@@ -145,11 +121,11 @@ def workflow_dag_multiple_edges_labeled(factory_node):
     ]
     [dag.add_node(d) for d in data]
-    dag.add_edge(data[0], data[1], 'success')
-    dag.add_edge(data[0], data[2], 'failure')
-    dag.add_edge(data[2], data[3], 'success')
-    dag.add_edge(data[2], data[4], 'failure')
-    dag.add_edge(data[4], data[5], 'failure')
+    dag.add_edge(data[0], data[1], 'success_nodes')
+    dag.add_edge(data[0], data[2], 'failure_nodes')
+    dag.add_edge(data[2], data[3], 'success_nodes')
+    dag.add_edge(data[2], data[4], 'failure_nodes')
+    dag.add_edge(data[4], data[5], 'failure_nodes')
     expected = data[5:6]
     return (dag, expected, False)
@@ -163,15 +139,15 @@ def workflow_dag_finished(factory_node):
         factory_node(2, 'failed'),
         factory_node(3, None),
         factory_node(4, 'failed'),
-        factory_node(5, 'success'),
+        factory_node(5, 'successful'),
     ]
     [dag.add_node(d) for d in data]
-    dag.add_edge(data[0], data[1], 'success')
-    dag.add_edge(data[0], data[2], 'failure')
-    dag.add_edge(data[2], data[3], 'success')
-    dag.add_edge(data[2], data[4], 'failure')
-    dag.add_edge(data[4], data[5], 'failure')
+    dag.add_edge(data[0], data[1], 'success_nodes')
+    dag.add_edge(data[0], data[2], 'failure_nodes')
+    dag.add_edge(data[2], data[3], 'success_nodes')
+    dag.add_edge(data[2], data[4], 'failure_nodes')
+    dag.add_edge(data[4], data[5], 'failure_nodes')
     expected = []
     return (dag, expected, True)
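Across these test hunks the edge labels move from shorthand ('success', 'failure') to the relation names the workflow code actually queries ('success_nodes', 'failure_nodes', 'always_nodes'), and node statuses move to real job states ('successful', 'failed'). Why the label must match exactly, in a toy sketch (our own stand-in, not AWX's SimpleDAG):

    class TinyDAG(object):
        def __init__(self):
            self.edges = []  # (from_obj, to_obj, label) triples

        def add_edge(self, from_obj, to_obj, label):
            self.edges.append((from_obj, to_obj, label))

        def get_dependencies(self, obj, label):
            # Lookup is by exact label string; 'success' != 'success_nodes'.
            return [t for (f, t, l) in self.edges if f is obj and l == label]

    dag = TinyDAG()
    a, b = object(), object()
    dag.add_edge(a, b, 'success')  # label the old fixtures used
    assert dag.get_dependencies(a, 'success_nodes') == []  # query misses
    dag.add_edge(a, b, 'success_nodes')  # label the scheduler queries
    assert dag.get_dependencies(a, 'success_nodes') == [b]  # query hits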