More small cleanup for zeromq work. Switch back to single-path
writing of job events
This commit is contained in:
Matthew Jones
2014-02-19 13:56:19 -05:00
parent 6f54574a94
commit 3231f966db
2 changed files with 1 addition and 21 deletions

View File

@@ -493,12 +493,6 @@ class RunJob(BaseTask):
return False
elif job.status in ('pending', 'waiting'):
job = self.update_model(job.pk, status='pending')
# Start another task to process job events.
# if settings.BROKER_URL.startswith('amqp://'):
# app = Celery('tasks', broker=settings.BROKER_URL)
# send_task('awx.main.tasks.save_job_events', kwargs={
# 'job_id': job.id,
# }, serializer='json')
return True
else:
return False
@@ -512,20 +506,6 @@ class RunJob(BaseTask):
# to tell the save job events task to end.
if settings.BROKER_URL.startswith('amqp://'):
pass
# job_events_exchange = Exchange('job_events', 'direct', durable=True)
# job_events_queue = Queue('job_events[%d]' % job.id,
# exchange=job_events_exchange,
# routing_key=('job_events[%d]' % job.id),
# auto_delete=True)
# with Connection(settings.BROKER_URL, transport_options={'confirm_publish': True}) as conn:
# with conn.Producer(serializer='json') as producer:
# msg = {
# 'job_id': job.id,
# 'event': '__complete__'
# }
# producer.publish(msg, exchange=job_events_exchange,
# routing_key=('job_events[%d]' % job.id),
# declare=[job_events_queue])
# Update job event fields after job has completed (only when using REST
# API callback).

View File

@@ -368,7 +368,7 @@ class BaseTestMixin(object):
def start_queue(self, consumer_port, queue_port):
self.queue_process = Process(target=run_subscriber,
args=(consumer_port, queue_port,))
args=(consumer_port, queue_port, False,))
self.queue_process.start()
def terminate_queue(self):