Merge pull request #1372 from chrismeyersfsu/old-celery3

Celery 4.x to 3.x rollback
Chris Meyers, 2018-02-27 15:26:46 -05:00 (committed by GitHub)
commit d551566b4d
20 changed files with 70 additions and 78 deletions

View File

@@ -324,7 +324,7 @@ celeryd:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
# Run to start the zeromq callback receiver
receiver:
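The rolled-back command drops the explicit -Q tower_broadcast_all flag; under this configuration the broadcast queue is declared in Django settings instead (see the CELERY_QUEUES hunk in defaults.py further down). A minimal sketch of that declaration, assuming kombu's Broadcast helper from kombu.common:

# Sketch only; queue and exchange names are taken from the defaults.py hunk below.
# Broadcast declares a fanout-style queue, so every worker receives a copy of each message.
from kombu import Queue, Exchange
from kombu.common import Broadcast

CELERY_QUEUES = (
    Queue('tower', Exchange('tower'), routing_key='tower'),
    Broadcast('tower_broadcast_all'),
)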

View File

@@ -7,7 +7,7 @@ import sys
import warnings
from pkg_resources import get_distribution
from .celery import app as celery_app
from .celery import app as celery_app # noqa
__version__ = get_distribution('awx').version

View File

@@ -1,3 +1,4 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
@@ -5,6 +6,7 @@ from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from django.conf import settings # noqa
try:
@@ -16,8 +18,8 @@ except ImportError: # pragma: no cover
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
app = Celery('awx')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
if __name__ == '__main__':
app.start()
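The substantive change in this file is the dropped namespace argument; most of the renames elsewhere in the diff (CELERY_BROKER_URL back to BROKER_URL, CELERY_TASK_ROUTES back to CELERY_ROUTES, and so on) follow from it. A short sketch of which Django settings each style reads, assuming a configured settings module:

# Illustrative, not exhaustive.
from celery import Celery

app = Celery('awx')

# Celery 4.x (removed above): the namespace makes Celery read only CELERY_-prefixed
# names, e.g. settings.CELERY_BROKER_URL and settings.CELERY_TASK_ROUTES.
# app.config_from_object('django.conf:settings', namespace='CELERY')

# Celery 3.x (restored above): no namespace, so the classic names are read directly,
# e.g. settings.BROKER_URL, settings.CELERY_ROUTES, settings.CELERYBEAT_SCHEDULE.
app.config_from_object('django.conf:settings')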

View File

@@ -208,7 +208,7 @@ class Command(BaseCommand):
help = 'Launch the job callback receiver'
def handle(self, *arg, **options):
with Connection(settings.CELERY_BROKER_URL) as conn:
with Connection(settings.BROKER_URL) as conn:
try:
worker = CallbackBrokerWorker(conn)
worker.run()

View File

@@ -28,7 +28,7 @@ from rest_framework.exceptions import ParseError
from polymorphic.models import PolymorphicModel
# Django-Celery
from django_celery_results.models import TaskResult
from djcelery.models import TaskMeta
# AWX
from awx.main.models.base import * # noqa
@@ -1093,8 +1093,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def celery_task(self):
try:
if self.celery_task_id:
return TaskResult.objects.get(task_id=self.celery_task_id)
except TaskResult.DoesNotExist:
return TaskMeta.objects.get(task_id=self.celery_task_id)
except TaskMeta.DoesNotExist:
pass
def get_passwords_needed_to_start(self):
@@ -1335,7 +1335,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
cancel_fields.append('job_explanation')
self.save(update_fields=cancel_fields)
self.websocket_emit_status("canceled")
if settings.CELERY_BROKER_URL.startswith('amqp://'):
if settings.BROKER_URL.startswith('amqp://'):
self._force_cancel()
return self.cancel_flag
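With the django-celery result backend restored, task state lives in djcelery's TaskMeta table rather than django-celery-results' TaskResult. A minimal sketch of an equivalent status lookup, assuming TaskMeta's standard task_id and status columns:

# Illustrative only; the property above returns the TaskMeta row itself rather than a status string.
from djcelery.models import TaskMeta

def celery_task_status(task_id):
    try:
        return TaskMeta.objects.get(task_id=task_id).status  # e.g. 'STARTED', 'SUCCESS'
    except TaskMeta.DoesNotExist:
        return None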

View File

@@ -19,7 +19,7 @@ __all__ = ['CallbackQueueDispatcher']
class CallbackQueueDispatcher(object):
def __init__(self):
self.callback_connection = getattr(settings, 'CELERY_BROKER_URL', None)
self.callback_connection = getattr(settings, 'BROKER_URL', None)
self.connection_queue = getattr(settings, 'CALLBACK_QUEUE', '')
self.connection = None
self.exchange = None
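The dispatcher only captures the broker URL and queue name here; its publish path is outside this hunk. A hedged sketch of how an event could be sent over that connection using stock kombu APIs (the payload and durability flag are illustrative):

# Hypothetical publish path, not AWX's actual dispatch code.
from django.conf import settings
from kombu import Connection, Queue

with Connection(settings.BROKER_URL) as conn:
    queue = Queue(settings.CALLBACK_QUEUE, durable=True)
    producer = conn.Producer(serializer='json')
    producer.publish({'event': 'runner_on_ok'}, routing_key=queue.name, declare=[queue])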

View File

@@ -133,7 +133,7 @@ class TaskManager():
def get_active_tasks(self):
if not hasattr(settings, 'IGNORE_CELERY_INSPECTOR'):
app = Celery('awx')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.config_from_object('django.conf:settings')
inspector = Inspect(app=app)
active_task_queues = inspector.active()
else:
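inspector.active() returns a mapping of worker name to the tasks that worker is currently executing, or None when no worker replies. A rough sketch of consuming it, using the same non-namespaced configuration call as above:

# Sketch only; 'id' and 'name' are standard keys in Celery's active-task payload.
from celery import Celery
from celery.app.control import Inspect

app = Celery('awx')
app.config_from_object('django.conf:settings')
active = Inspect(app=app).active()
for worker, tasks in (active or {}).items():
    print(worker, [t.get('id') for t in tasks])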

View File

@@ -47,7 +47,6 @@ from crum import impersonate
# AWX
from awx import __version__ as awx_application_version
from awx import celery_app
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS
from awx.main.models import * # noqa
from awx.main.models.unified_jobs import ACTIVE_STATES
@@ -208,14 +207,14 @@ def handle_ha_toplogy_changes(self):
instance = Instance.objects.me()
logger.debug("Reconfigure celeryd queues task on host {}".format(self.request.hostname))
awx_app = Celery('awx')
awx_app.config_from_object('django.conf:settings', namespace='CELERY')
awx_app.config_from_object('django.conf:settings')
instances, removed_queues, added_queues = register_celery_worker_queues(awx_app, self.request.hostname)
for instance in instances:
logger.info("Workers on tower node '{}' removed from queues {} and added to queues {}"
.format(instance.hostname, removed_queues, added_queues))
updated_routes = update_celery_worker_routes(instance, settings)
logger.info("Worker on tower node '{}' updated celery routes {} all routes are now {}"
.format(instance.hostname, updated_routes, self.app.conf.CELERY_TASK_ROUTES))
.format(instance.hostname, updated_routes, self.app.conf.CELERY_ROUTES))
@worker_ready.connect
@@ -234,7 +233,7 @@ def handle_update_celery_routes(sender=None, conf=None, **kwargs):
instance = Instance.objects.me()
added_routes = update_celery_worker_routes(instance, conf)
logger.info("Workers on tower node '{}' added routes {} all routes are now {}"
.format(instance.hostname, added_routes, conf.CELERY_TASK_ROUTES))
.format(instance.hostname, added_routes, conf.CELERY_ROUTES))
@celeryd_after_setup.connect
@@ -2359,10 +2358,3 @@ def deep_copy_model_obj(
importlib.import_module(permission_check_func[0]), permission_check_func[1]
), permission_check_func[2])
permission_check_func(creater, copy_mapping.values())
celery_app.register_task(RunJob())
celery_app.register_task(RunProjectUpdate())
celery_app.register_task(RunInventoryUpdate())
celery_app.register_task(RunAdHocCommand())
celery_app.register_task(RunSystemJob())
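The register_task calls are dropped because Celery 3.x still registers concrete Task subclasses automatically through its task metaclass; Celery 4 removed that behaviour, which is what made the explicit registrations necessary. An illustrative (deliberately non-AWX) class-based task under 3.x:

# Hypothetical example; the task name and body are invented for illustration.
from celery import Task

class ExampleJobTask(Task):
    name = 'awx.example.run_example'  # not a real AWX task name
    abstract = False                  # concrete classes are auto-registered in Celery 3.x

    def run(self, pk, **kwargs):
        return pk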

View File

@@ -73,7 +73,7 @@ def celery_memory_broker():
Allows django signal code to execute without the need for redis
'''
settings.CELERY_BROKER_URL='memory://localhost/'
settings.BROKER_URL='memory://localhost/'
@pytest.fixture

View File

@@ -8,11 +8,11 @@ from datetime import timedelta
('admin_checks', 'awx.main.tasks.run_administrative_checks'),
('tower_scheduler', 'awx.main.tasks.awx_periodic_scheduler'),
])
def test_CELERY_BEAT_SCHEDULE(mocker, job_name, function_path):
assert job_name in settings.CELERY_BEAT_SCHEDULE
assert 'schedule' in settings.CELERY_BEAT_SCHEDULE[job_name]
assert type(settings.CELERY_BEAT_SCHEDULE[job_name]['schedule']) is timedelta
assert settings.CELERY_BEAT_SCHEDULE[job_name]['task'] == function_path
def test_CELERYBEAT_SCHEDULE(mocker, job_name, function_path):
assert job_name in settings.CELERYBEAT_SCHEDULE
assert 'schedule' in settings.CELERYBEAT_SCHEDULE[job_name]
assert type(settings.CELERYBEAT_SCHEDULE[job_name]['schedule']) is timedelta
assert settings.CELERYBEAT_SCHEDULE[job_name]['task'] == function_path
# Ensures that the function exists
mocker.patch(function_path)

View File

@@ -17,7 +17,7 @@ from awx.main.utils.ha import (
@pytest.fixture
def conf():
class Conf():
CELERY_TASK_ROUTES = dict()
CELERY_ROUTES = dict()
CELERYBEAT_SCHEDULE = dict()
return Conf()
@@ -88,14 +88,14 @@ class TestUpdateCeleryWorkerRoutes():
instance.is_controller = mocker.MagicMock(return_value=is_controller)
assert update_celery_worker_routes(instance, conf) == expected_routes
assert conf.CELERY_TASK_ROUTES == expected_routes
assert conf.CELERY_ROUTES == expected_routes
def test_update_celery_worker_routes_deleted(self, mocker, conf):
instance = mocker.MagicMock()
instance.hostname = 'east-1'
instance.is_controller = mocker.MagicMock(return_value=False)
conf.CELERY_TASK_ROUTES = {'awx.main.tasks.awx_isolated_heartbeat': 'foobar'}
conf.CELERY_ROUTES = {'awx.main.tasks.awx_isolated_heartbeat': 'foobar'}
update_celery_worker_routes(instance, conf)
assert 'awx.main.tasks.awx_isolated_heartbeat' not in conf.CELERY_TASK_ROUTES
assert 'awx.main.tasks.awx_isolated_heartbeat' not in conf.CELERY_ROUTES

View File

@@ -48,12 +48,12 @@ def update_celery_worker_routes(instance, conf):
if instance.is_controller():
tasks.append('awx.main.tasks.awx_isolated_heartbeat')
else:
if 'awx.main.tasks.awx_isolated_heartbeat' in conf.CELERY_TASK_ROUTES:
del conf.CELERY_TASK_ROUTES['awx.main.tasks.awx_isolated_heartbeat']
if 'awx.main.tasks.awx_isolated_heartbeat' in conf.CELERY_ROUTES:
del conf.CELERY_ROUTES['awx.main.tasks.awx_isolated_heartbeat']
for t in tasks:
conf.CELERY_TASK_ROUTES[t] = {'queue': instance.hostname, 'routing_key': instance.hostname}
routes_updated[t] = conf.CELERY_TASK_ROUTES[t]
conf.CELERY_ROUTES[t] = {'queue': instance.hostname, 'routing_key': instance.hostname}
routes_updated[t] = conf.CELERY_ROUTES[t]
return routes_updated
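For each task in the per-node list, the helper pins a route to the worker's own hostname. An illustrative result for a controller node (hostname invented; task names taken from elsewhere in this diff):

# Hypothetical output shape; only the {'queue': ..., 'routing_key': ...} structure
# comes from the code above.
routes = {
    'awx.main.tasks.cluster_node_heartbeat': {'queue': 'awx-node-1', 'routing_key': 'awx-node-1'},
    'awx.main.tasks.awx_isolated_heartbeat': {'queue': 'awx-node-1', 'routing_key': 'awx-node-1'},
}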

View File

@@ -5,6 +5,7 @@ import os
import re # noqa
import sys
import ldap
import djcelery
from datetime import timedelta
from kombu import Queue, Exchange
@@ -266,7 +267,7 @@ INSTALLED_APPS = (
'oauth2_provider',
'rest_framework',
'django_extensions',
'django_celery_results',
'djcelery',
'channels',
'polymorphic',
'taggit',
@@ -452,28 +453,30 @@ DEVSERVER_DEFAULT_PORT = '8013'
# Set default ports for live server tests.
os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199')
djcelery.setup_loader()
BROKER_POOL_LIMIT = None
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
BROKER_URL = 'amqp://guest:guest@localhost:5672//'
CELERY_EVENT_QUEUE_TTL = 5
CELERY_TASK_DEFAULT_QUEUE = 'tower'
CELERY_DEFAULT_QUEUE = 'tower'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT = None
CELERY_TASK_SOFT_TIME_LIMIT = None
CELERY_WORKER_POOL_RESTARTS = True
CELERY_BEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERY_BEAT_MAX_LOOP_INTERVAL = 60
CELERY_RESULT_BACKEND = 'django-db'
CELERY_TRACK_STARTED = True
CELERYD_TASK_TIME_LIMIT = None
CELERYD_TASK_SOFT_TIME_LIMIT = None
CELERYD_POOL_RESTARTS = True
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_IMPORTS = ('awx.main.scheduler.tasks',)
CELERY_TASK_QUEUES = (
CELERY_QUEUES = (
Queue('tower', Exchange('tower'), routing_key='tower'),
Broadcast('tower_broadcast_all')
)
CELERY_TASK_ROUTES = {}
CELERY_ROUTES = {}
CELERY_BEAT_SCHEDULE = {
CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERYBEAT_MAX_LOOP_INTERVAL = 60
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {
'task': 'awx.main.tasks.awx_periodic_scheduler',
'schedule': timedelta(seconds=30),
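Most of this hunk is a mechanical rename from the Celery 4.x setting names back to their 3.x equivalents, plus the result backend moving from django-celery-results ('django-db') to django-celery's database backend. The pairs that appear in this changeset map as follows:

# 4.x name (removed)              -> 3.x name (restored)
# CELERY_BROKER_URL               -> BROKER_URL
# CELERY_TASK_DEFAULT_QUEUE       -> CELERY_DEFAULT_QUEUE
# CELERY_TASK_TRACK_STARTED       -> CELERY_TRACK_STARTED
# CELERY_TASK_TIME_LIMIT          -> CELERYD_TASK_TIME_LIMIT
# CELERY_TASK_SOFT_TIME_LIMIT     -> CELERYD_TASK_SOFT_TIME_LIMIT
# CELERY_WORKER_POOL_RESTARTS     -> CELERYD_POOL_RESTARTS
# CELERY_BEAT_SCHEDULER           -> CELERYBEAT_SCHEDULER
# CELERY_BEAT_MAX_LOOP_INTERVAL   -> CELERYBEAT_MAX_LOOP_INTERVAL
# CELERY_BEAT_SCHEDULE            -> CELERYBEAT_SCHEDULE
# CELERY_TASK_QUEUES              -> CELERY_QUEUES
# CELERY_TASK_ROUTES              -> CELERY_ROUTES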

View File

@@ -79,15 +79,15 @@ if is_testing(sys.argv):
}
# Celery AMQP configuration.
CELERY_BROKER_URL = "amqp://{}:{}@{}/{}".format(os.environ.get("RABBITMQ_USER"),
os.environ.get("RABBITMQ_PASS"),
os.environ.get("RABBITMQ_HOST"),
urllib.quote(os.environ.get("RABBITMQ_VHOST", "/"), safe=''))
BROKER_URL = "amqp://{}:{}@{}/{}".format(os.environ.get("RABBITMQ_USER"),
os.environ.get("RABBITMQ_PASS"),
os.environ.get("RABBITMQ_HOST"),
urllib.quote(os.environ.get("RABBITMQ_VHOST", "/"), safe=''))
CHANNEL_LAYERS = {
'default': {'BACKEND': 'asgi_amqp.AMQPChannelLayer',
'ROUTING': 'awx.main.routing.channel_routing',
'CONFIG': {'url': CELERY_BROKER_URL}}
'CONFIG': {'url': BROKER_URL}}
}
# Set True to enable additional logging from the job_event_callback plugin
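Because the vhost is URL-quoted with safe='', even the default '/' vhost is percent-encoded in the broker URL. A worked example with made-up environment values:

# Python 2 sketch mirroring the format string above; credentials are invented.
import urllib
user, password, host, vhost = 'awx', 'secret', 'rabbitmq', '/'
url = "amqp://{}:{}@{}/{}".format(user, password, host, urllib.quote(vhost, safe=''))
# url == 'amqp://awx:secret@rabbitmq/%2F'  (urllib.quote('/', safe='') == '%2F')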

View File

@@ -3,7 +3,7 @@ nodaemon = True
umask = 022
[program:celery]
command = /var/lib/awx/venv/awx/bin/celery worker -A awx -B -l debug --autoscale=4 -Ofair -s /var/lib/awx/beat.db -Q tower_broadcast_all -n celery@%(ENV_HOSTNAME)s
command = /var/lib/awx/venv/awx/bin/celery worker -A awx -B -l debug --autoscale=4 -Ofair -s /var/lib/awx/beat.db -n celery@%(ENV_HOSTNAME)s
directory = /var/lib/awx
environment = LANGUAGE="en_US.UTF-8",LANG="en_US.UTF-8",LC_ALL="en_US.UTF-8",LC_CTYPE="en_US.UTF-8"
#user = {{ aw_user }}

View File

@@ -8,6 +8,7 @@ localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env pyth
# by default the base will be used to search for ansible/awx_web and ansible/awx_task
dockerhub_base=ansible
dockerhub_version=latest
rabbitmq_version=3.6.14
# This will create or update a default admin (superuser) account in AWX, if not provided
# then these default values are used
@@ -93,4 +94,4 @@ pg_port=5432
# AWX project data folder. If you need access to the location where AWX stores the projects
# it manages from the docker host, you can set this to turn it into a volume for the container.
#project_data_dir=/var/lib/awx/projects
#project_data_dir=/var/lib/awx/projects

View File

@@ -22,9 +22,6 @@ data:
REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR']
CELERY_TASK_QUEUES += (Queue(CLUSTER_HOST_ID, Exchange(CLUSTER_HOST_ID), routing_key=CLUSTER_HOST_ID),)
CELERY_TASK_ROUTES['awx.main.tasks.cluster_node_heartbeat'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID}
CELERY_TASK_ROUTES['awx.main.tasks.purge_old_stdout_files'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID}
STATIC_ROOT = '/var/lib/awx/public/static'
PROJECTS_ROOT = '/var/lib/awx/projects'
JOBOUTPUT_ROOT = '/var/lib/awx/job_status'
@@ -77,7 +74,7 @@ data:
'PORT': "{{ pg_port }}",
}
}
CELERY_BROKER_URL = 'amqp://{}:{}@{}:{}/{}'.format(
BROKER_URL = 'amqp://{}:{}@{}:{}/{}'.format(
"awx",
"abcdefg",
"localhost",
@@ -86,7 +83,7 @@ data:
CHANNEL_LAYERS = {
'default': {'BACKEND': 'asgi_amqp.AMQPChannelLayer',
'ROUTING': 'awx.main.routing.channel_routing',
'CONFIG': {'url': CELERY_BROKER_URL}}
'CONFIG': {'url': BROKER_URL}}
}
CACHES = {
'default': {

View File

@@ -33,7 +33,7 @@ spec:
- name: DATABASE_HOST
value: {{ pg_hostname|default('postgresql') }}
- name: DATABASE_PORT
value: ({{ pg_port|default('5432') }})
value: "{{ pg_port|default('5432') }}"
- name: DATABASE_PASSWORD
value: {{ pg_password }}
- name: MEMCACHED_HOST
@@ -45,7 +45,7 @@ spec:
- name: AWX_ADMIN_PASSWORD
value: {{ default_admin_password|default('password') }}
- name: awx-rabbit
image: ansible/awx_rabbitmq:latest
image: ansible/awx_rabbitmq:{{ rabbitmq_version }}
imagePullPolicy: Always
env:
# For consumption by rabbitmq-env.conf

View File

@@ -7,12 +7,11 @@ backports.ssl-match-hostname==3.5.0.1
boto==2.46.1
boto3==1.4.4
channels==1.1.8
celery==4.1
celery==3.1.25
daphne==1.3.0
Django==1.11.7
django-auth-ldap==1.2.8
django-celery-beat==1.1.0
django-celery-results==1.0.1
django-celery==3.2.2
django-crum==0.7.1
django-extensions==1.7.8
django-jsonfield==1.0.1

View File

@@ -5,7 +5,8 @@
# pip-compile --output-file requirements/requirements.txt requirements/requirements.in
#
adal==0.4.5 # via msrestazure
amqp==2.2.2 # via kombu
amqp==1.4.9 # via kombu
anyjson==0.3.3 # via kombu
apache-libcloud==2.0.0
appdirs==1.4.2
asgi-amqp==1.0.3
@@ -36,12 +37,12 @@ babel==2.3.4              # via osc-lib, oslo.i18n, python-cinderclient, python-
backports.functools-lru-cache==1.4 # via jaraco.functools
backports.ssl-match-hostname==3.5.0.1
baron==0.6.6 # via redbaron
billiard==3.5.0.3 # via celery
billiard==3.3.0.23 # via celery
boto3==1.4.4
boto==2.46.1
botocore==1.5.72 # via boto3, s3transfer
celery==4.1
#certifi==2017.11.5 # via msrest
celery==3.1.25
#certifi==2018.1.18 # via msrest
cffi==1.10.0 # via cryptography
channels==1.1.8
cliff==2.7.0 # via osc-lib, python-designateclient, python-neutronclient, python-openstackclient
@@ -54,8 +55,7 @@ decorator==4.0.11         # via shade
defusedxml==0.4.1 # via python-saml
deprecation==1.0.1 # via openstacksdk
django-auth-ldap==1.2.8
django-celery-beat==1.1.0
django-celery-results==1.0.1
django-celery==3.2.2
django-crum==0.7.1
django-extensions==1.7.8
django-jsonfield==1.0.1
@@ -73,7 +73,6 @@ djangorestframework==3.7.3
#docutils==0.14 # via botocore
dogpile.cache==0.6.3 # via python-ironicclient, shade
enum34==1.1.6 # via cryptography, msrest
ephem==3.7.6.0 # via django-celery-beat
funcsigs==1.0.2 # via debtcollector, oslo.utils
functools32==3.2.3.post2 # via jsonschema
futures==3.1.1 # via azure-storage, requests-futures, s3transfer, shade
@@ -97,18 +96,18 @@ jaraco.stream==1.1.2      # via irc
jaraco.text==1.9.2 # via irc, jaraco.collections
jmespath==0.9.3 # via boto3, botocore, shade
jsonpatch==1.16 # via openstacksdk, shade, warlock
jsonpickle==0.9.5 # via asgi_amqp
jsonpickle==0.9.5 # via asgi-amqp
jsonpointer==1.10 # via jsonpatch
jsonschema==2.6.0
keyring==10.3.3 # via msrestazure
keystoneauth1==2.21.0 # via openstacksdk, os-client-config, osc-lib, python-cinderclient, python-designateclient, python-glanceclient, python-ironicclient, python-keystoneclient, python-neutronclient, python-novaclient, python-openstackclient, shade
kombu==4.1.0 # via celery
kombu==3.0.37 # via asgi-amqp, celery
lxml==3.8.0 # via dm.xmlsec.binding, pyvmomi
m2crypto==0.25.1
markdown==2.6.7
monotonic==1.3 # via oslo.utils
more-itertools==3.2.0 # via irc, jaraco.functools, jaraco.itertools
msgpack-python==0.4.8 # via oslo.serialization
msgpack-python==0.4.8 # via asgi-amqp, oslo.serialization
msrest==0.4.10 # via azure-common, msrestazure
msrestazure==0.4.9 # via azure-common
munch==2.1.1 # via shade
@@ -170,7 +169,7 @@ secretstorage==2.3.1      # via keyring
service-identity==16.0.0
shade==1.20.0
simplejson==3.11.1 # via osc-lib, python-cinderclient, python-neutronclient, python-novaclient
six==1.10.0 # via asgiref, autobahn, automat, cliff, cmd2, cryptography, debtcollector, django-extensions, irc, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, keystoneauth1, more-itertools, munch, openstacksdk, osc-lib, oslo.config, oslo.i18n, oslo.serialization, oslo.utils, pygerduty, pyopenssl, pyrad, python-cinderclient, python-dateutil, python-designateclient, python-glanceclient, python-ironicclient, python-keystoneclient, python-memcached, python-neutronclient, python-novaclient, python-openstackclient, pyvmomi, shade, slackclient, social-auth-app-django, social-auth-core, stevedore, tacacs-plus, tempora, twilio, txaio, warlock, websocket-client
six==1.10.0 # via asgi-amqp, asgiref, autobahn, automat, cliff, cmd2, cryptography, debtcollector, django-extensions, irc, jaraco.classes, jaraco.collections, jaraco.itertools, jaraco.logging, jaraco.stream, keystoneauth1, more-itertools, munch, openstacksdk, osc-lib, oslo.config, oslo.i18n, oslo.serialization, oslo.utils, pygerduty, pyopenssl, pyrad, python-cinderclient, python-dateutil, python-designateclient, python-glanceclient, python-ironicclient, python-keystoneclient, python-memcached, python-neutronclient, python-novaclient, python-openstackclient, pyvmomi, shade, slackclient, social-auth-app-django, social-auth-core, stevedore, tacacs-plus, tempora, twilio, txaio, warlock, websocket-client
slackclient==1.0.6
social-auth-app-django==2.0.0
social-auth-core==1.5.0
@@ -184,7 +183,6 @@ txaio==2.8.2              # via autobahn
typing==3.6.2 # via m2crypto
unicodecsv==0.14.1 # via cliff
uwsgi==2.0.14
vine==1.1.4 # via amqp
warlock==1.2.0 # via python-glanceclient
websocket-client==0.44.0 # via slackclient
wrapt==1.10.10 # via debtcollector, positional, python-glanceclient