Cycle detection when a node has multiple parents

chris meyers 2018-10-25 13:46:03 -04:00 committed by mabashian
parent 29b4979736
commit 7b22d1b874
3 changed files with 56 additions and 32 deletions


@@ -87,6 +87,7 @@ from awx.api.renderers import * # noqa
from awx.api.serializers import * # noqa
from awx.api.metadata import RoleMetadata, JobTypeMetadata
from awx.main.constants import ACTIVE_STATES
from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.api.views.mixin import (
ActivityStreamEnforcementMixin,
SystemTrackingEnforcementMixin,
@@ -143,6 +144,9 @@ from awx.api.views.root import ( # noqa
)
logger = logging.getLogger('awx.api.views')
def api_exception_handler(exc, context):
'''
Override default API exception handler to catch IntegrityError exceptions.
@@ -2950,33 +2954,17 @@ class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, Enforce
if created:
return None
workflow_nodes = parent.workflow_job_template.workflow_job_template_nodes.all().\
prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
graph = {}
for workflow_node in workflow_nodes:
graph[workflow_node.pk] = dict(node_object=workflow_node, metadata={'parent': None, 'traversed': False})
if parent.id == sub.id:
return {"Error": _("Cycle detected.")}
find = False
for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
for workflow_node in workflow_nodes:
parent_node = graph[workflow_node.pk]
related_nodes = getattr(parent_node['node_object'], node_type).all()
for related_node in related_nodes:
sub_node = graph[related_node.pk]
sub_node['metadata']['parent'] = parent_node
if not find and parent == workflow_node and sub == related_node and self.relationship == node_type:
find = True
if not find:
sub_node = graph[sub.pk]
parent_node = graph[parent.pk]
sub_node['metadata']['parent'] = parent_node
iter_node = sub_node
while iter_node is not None:
if iter_node['metadata']['traversed']:
return {"Error": _("Cycle detected.")}
iter_node['metadata']['traversed'] = True
iter_node = iter_node['metadata']['parent']
parent_node_type_relationship = getattr(parent, self.relationship)
parent_node_type_relationship.add(sub)
parent.save()
graph = WorkflowDAG(parent.workflow_job_template)
if graph.has_cycle():
parent_node_type_relationship.remove(sub)
return {"Error": _("Cycle detected.")}
return None
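
The removed check walked a single metadata['parent'] pointer upward from the new child, and because each node keeps only one parent pointer, a node with several parents retains just the last one assigned, so only one upward path is inspected. The replacement instead adds the edge, rebuilds a WorkflowDAG from the whole template, and removes the edge again if has_cycle() reports a loop. A minimal sketch of that add-then-verify pattern, using hypothetical stand-in names (template, parent_node, child_node, relationship) rather than the view's real attributes:

from awx.main.scheduler.dag_workflow import WorkflowDAG

def link_or_reject(parent_node, child_node, relationship, template):
    # Tentatively create the edge, e.g. relationship == 'success_nodes'.
    related = getattr(parent_node, relationship)
    related.add(child_node)
    parent_node.save()

    # Rebuild the full workflow graph and verify it is still acyclic.
    graph = WorkflowDAG(template)
    if graph.has_cycle():
        related.remove(child_node)      # undo the tentative edge
        return {"Error": "Cycle detected."}
    return None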


@@ -23,9 +23,9 @@ class SimpleDAG(object):
def run_status(obj):
dnr = "RUN"
status = "NA"
if obj.job:
if hasattr(obj, 'job') and obj.job and hasattr(obj.job, 'status'):
status = obj.job.status
if obj.do_not_run is True:
if hasattr(obj, 'do_not_run') and obj.do_not_run is True:
dnr = "DNR"
return "{}_{}_{}".format(dnr, status, obj.id)
@@ -36,7 +36,7 @@ class SimpleDAG(object):
for n in self.nodes:
obj = n['node_object']
status = "NA"
if obj.job:
if hasattr(obj, 'job') and obj.job:
status = obj.job.status
color = 'black'
if status == 'successful':
@@ -65,8 +65,12 @@ class SimpleDAG(object):
def add_edge(self, from_obj, to_obj, label=None):
from_obj_ord = self.find_ord(from_obj)
to_obj_ord = self.find_ord(to_obj)
if from_obj_ord is None or to_obj_ord is None:
raise LookupError("Object not found")
if from_obj_ord is None and to_obj_ord is None:
raise LookupError("From object {} and to object not found".format(from_obj, to_obj))
elif from_obj_ord is None:
raise LookupError("From object not found {}".format(from_obj))
elif to_obj_ord is None:
raise LookupError("To object not found {}".format(to_obj))
self.edges.append((from_obj_ord, to_obj_ord, label))
def add_edges(self, edgelist):
@@ -116,3 +120,29 @@ class SimpleDAG(object):
if len(self.get_dependents(n['node_object'])) < 1:
roots.append(n)
return roots
def has_cycle(self):
node_objs = [node['node_object'] for node in self.get_root_nodes()]
nodes_visited = set([])
path = set([])
stack = node_objs
path_direction = 'DOWN'
while stack:
node_obj = stack.pop()
children = self.get_dependencies(node_obj)
for child in children:
if child['node_object'] not in nodes_visited:
stack.append(child['node_object'])
if node_obj in path:
return True
if not children:
path_direction = 'UP'
if path_direction == 'DOWN':
path.add(node_obj)
elif path_direction == 'UP':
path.discard(node_obj)
return False
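
The new has_cycle() starts from the root nodes and walks the graph depth-first, reporting a cycle when it revisits a node on the current path. For reference, a minimal self-contained sketch of that back-edge technique over a plain adjacency dict; it illustrates the general approach rather than reproducing SimpleDAG's node/edge bookkeeping, and the example graphs are made up:

def has_cycle(adjacency):
    """Return True if the directed graph (dict mapping node -> list of
    children, with every node present as a key) contains a cycle."""
    WHITE, GRAY, BLACK = 0, 1, 2               # unvisited / on current path / finished
    color = {node: WHITE for node in adjacency}

    for start in adjacency:
        if color[start] != WHITE:
            continue
        stack = [(start, iter(adjacency[start]))]
        color[start] = GRAY
        while stack:
            node, children = stack[-1]
            child = next(children, None)
            if child is None:                  # all children seen: leave the path
                color[node] = BLACK
                stack.pop()
            elif color[child] == GRAY:         # back-edge into the current path
                return True
            elif color[child] == WHITE:
                color[child] = GRAY
                stack.append((child, iter(adjacency[child])))
    return False

# 1 -> 2 -> 3 -> 1 closes a loop; the second graph gives node 3 two parents
# without closing one.
assert has_cycle({1: [2], 2: [3], 3: [1]})
assert not has_cycle({1: [2, 3], 2: [3], 3: []})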


@@ -12,8 +12,14 @@ class WorkflowDAG(SimpleDAG):
if workflow_job:
self._init_graph(workflow_job)
def _init_graph(self, workflow_job):
node_qs = workflow_job.workflow_job_nodes
def _init_graph(self, workflow_job_or_jt):
if hasattr(workflow_job_or_jt, 'workflow_job_template_nodes'):
node_qs = workflow_job_or_jt.workflow_job_template_nodes
elif hasattr(workflow_job_or_jt, 'workflow_job_nodes'):
node_qs = workflow_job_or_jt.workflow_job_nodes
else:
raise RuntimeError("Unexpected object {} {}".format(type(workflow_job_or_jt), workflow_job_or_jt))
workflow_nodes = node_qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes').all()
for workflow_node in workflow_nodes:
self.add_node(workflow_node)
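
_init_graph now duck-types its argument: an object exposing workflow_job_template_nodes is read as a workflow job template, one exposing workflow_job_nodes as a workflow job, and anything else raises RuntimeError. A short usage sketch, assuming wfjt and wfj are a WorkflowJobTemplate and a WorkflowJob fetched elsewhere (hypothetical variable names):

from awx.main.scheduler.dag_workflow import WorkflowDAG

template_graph = WorkflowDAG(wfjt)    # iterates wfjt.workflow_job_template_nodes
job_graph = WorkflowDAG(wfj)          # iterates wfj.workflow_job_nodes

# has_cycle() comes from SimpleDAG; the API view builds the template form
# to reject a new edge that would close a loop.
if template_graph.has_cycle():
    print("Cycle detected.")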