Merge branch 'devel' of https://github.com/ansible/ansible-tower into can_CRUD
This commit is contained in: commit 23791cb91e
Makefile (48 lines changed)
@@ -10,7 +10,12 @@ NPM_BIN ?= npm
DEPS_SCRIPT ?= packaging/bundle/deps.py
GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)

VENV_BASE ?= /tower_devel/venv
GCLOUD_AUTH ?= $(shell gcloud auth print-access-token)
COMPOSE_TAG ?= devel
# NOTE: This defaults the container image version to the branch that's active
# COMPOSE_TAG ?= $(GIT_BRANCH)

VENV_BASE ?= /venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /celerybeat-schedule

@@ -246,7 +251,7 @@ virtualenv_ansible:
if [ ! -d "$(VENV_BASE)/ansible" ]; then \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/ansible && \
$(VENV_BASE)/ansible/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/ansible/bin/pip install -I pip==8.1.1; \
$(VENV_BASE)/ansible/bin/pip install -I pip==8.1.2; \
fi; \
fi

@@ -258,7 +263,7 @@ virtualenv_tower:
if [ ! -d "$(VENV_BASE)/tower" ]; then \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/tower && \
$(VENV_BASE)/tower/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/tower/bin/pip install -I pip==8.1.1; \
$(VENV_BASE)/tower/bin/pip install -I pip==8.1.2; \
fi; \
fi

@@ -293,7 +298,7 @@ requirements_jenkins:
else \
pip install -Ir requirements/requirements_jenkins.txt; \
fi && \
$(NPM_BIN) install csslint jshint
$(NPM_BIN) install csslint

requirements: requirements_ansible requirements_tower

@@ -430,13 +435,22 @@ check: flake8 pep8 # pyflakes pylint
TEST_DIRS=awx/main/tests
# Run all API unit tests.
test:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
py.test $(TEST_DIRS)

test_unit:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
py.test awx/main/tests/unit

# Run all API unit tests with coverage enabled.
test_coverage:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)

# Output test coverage as HTML (into htmlcov directory).
@@ -473,6 +487,12 @@ ui-test: ui-deps-built
ui-test-ci: ui-deps-built
$(NPM_BIN) --prefix awx/ui run test:ci

testjs_ci:
echo "Update UI unittests later" #ui-test-ci

jshint: ui-deps-built
grunt --gruntfile awx/ui/Gruntfile.js jshint #Depends on node 6.x and npm 3.x installed on Jenkins slave

ui-test-saucelabs: ui-deps-built
$(NPM_BIN) --prefix awx/ui run test:saucelabs

@@ -719,12 +739,20 @@ install:
export SCL_PREFIX HTTPD_SCL_PREFIX
$(PYTHON) setup.py install $(SETUP_INSTALL_ARGS)

# Docker Compose Development environment
docker-compose:
docker-compose -f tools/docker-compose.yml up --no-recreate
docker-auth:
docker login -e 1234@5678.com -u oauth2accesstoken -p "$(GCLOUD_AUTH)" https://gcr.io

docker-compose-test:
cd tools && docker-compose run --rm --service-ports tower /bin/bash
# Docker Compose Development environment
docker-compose: docker-auth
TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate

docker-compose-test: docker-auth
cd tools && TAG=$(COMPOSE_TAG) docker-compose run --rm --service-ports tower /bin/bash

docker-compose-build:
docker build -t ansible/tower_devel -f tools/docker-compose/Dockerfile .
docker tag ansible/tower_devel gcr.io/ansible-tower-engineering/tower_devel:$(COMPOSE_TAG)
#docker push gcr.io/ansible-tower-engineering/tower_devel:$(COMPOSE_TAG)

MACHINE?=default
docker-clean:
@@ -732,7 +760,7 @@ docker-clean:
eval $$(docker-machine env $(MACHINE))
docker stop $$(docker ps -a -q)
-docker rm $$(docker ps -f name=tools_tower -a -q)
-docker rmi tools_tower
-docker images | grep "tower_devel" | awk '{print $3}' | xargs docker rmi

docker-refresh: docker-clean docker-compose

@@ -2263,8 +2263,6 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):

def post(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()

if 'credential' not in request.data and 'credential_id' in request.data:
request.data['credential'] = request.data['credential_id']
@@ -2643,14 +2641,13 @@ class SystemJobTemplateLaunch(GenericAPIView):

model = SystemJobTemplate
serializer_class = EmptySerializer
is_job_start = True

def get(self, request, *args, **kwargs):
return Response({})

def post(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()

new_job = obj.create_unified_job(**request.data)
new_job.signal_start(**request.data)
@@ -2757,8 +2754,6 @@ class JobStart(GenericAPIView):

def post(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()
if obj.can_start:
result = obj.signal_start(**request.data)
if not result:
@@ -2796,8 +2791,6 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):

def post(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()

# Note: is_valid() may modify request.data
# It will remove any key/value pair who's key is not in the 'passwords_needed_to_start' list
@@ -3243,8 +3236,6 @@ class AdHocCommandRelaunch(GenericAPIView):

def post(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()

# Re-validate ad hoc command against serializer to check if module is
# still allowed.

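One pattern worth calling out from these hunks: the launch endpoint accepts credential_id as an alias for credential before validating the request. A tiny standalone sketch of that aliasing step (a plain dict stands in for request.data here):

def normalize_launch_payload(data):
    # Sketch: accept 'credential_id' as an alias for 'credential'.
    if 'credential' not in data and 'credential_id' in data:
        data['credential'] = data['credential_id']
    return data

assert normalize_launch_payload({'credential_id': 42})['credential'] == 42
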
@@ -747,7 +747,9 @@ class Job(UnifiedJob, JobOptions):

'''
if self.survey_passwords:
extra_vars = json.loads(self.extra_vars)
extra_vars.update(self.survey_passwords)
for key, value in self.survey_passwords.items():
if key in extra_vars:
extra_vars[key] = value
return json.dumps(extra_vars)
else:
return self.extra_vars

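The change above swaps a blanket extra_vars.update(self.survey_passwords) for a loop that only masks keys the submitter actually provided. A minimal sketch of the resulting behavior, using a standalone function in place of the Job method (names are illustrative only):

import json

def display_extra_vars(extra_vars, survey_passwords):
    # Only redact keys that are already present in extra_vars.
    merged = json.loads(extra_vars)
    for key, value in survey_passwords.items():
        if key in merged:
            merged[key] = value  # value is the '$encrypted$' placeholder
    return json.dumps(merged)

shown = display_extra_vars(
    json.dumps({'submitter_email': 'foobar@redhat.com'}),
    {'secret_key': '$encrypted$', 'SSN': '$encrypted$'})
# 'secret_key' and 'SSN' were never supplied, so they do not appear in the output.
assert json.loads(shown) == {'submitter_email': 'foobar@redhat.com'}
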
@@ -3,43 +3,9 @@

import json

from redis import StrictRedis

from django.conf import settings

__all__ = ['FifoQueue']


# Determine, based on settings.BROKER_URL (for celery), what the correct Redis
# connection settings are.
redis_kwargs = {}
broker_url = settings.BROKER_URL
if not broker_url.lower().startswith('redis://'):
raise RuntimeError('Error importing awx.main.queue: Cannot use queue with '
'a non-Redis broker configured for celery.\n'
'Broker is set to: %s' % broker_url)
broker_url = broker_url[8:]

# There may or may not be a password; address both situations by checking
# for an "@" in the broker URL.
if '@' in broker_url:
broker_auth, broker_host = broker_url.split('@')
redis_kwargs['password'] = broker_auth.split(':')[1]
else:
broker_host = broker_url

# Ignore anything after a / in the broker host.
broker_host = broker_host.split('/')[0]

# If a custom port is present, parse it out.
if ':' in broker_host:
broker_host, broker_port = broker_host.split(':')
redis_kwargs['port'] = int(broker_port)

# Now create a StrictRedis object that knows how to connect appropriately.
redis = StrictRedis(broker_host, **redis_kwargs)


# TODO: Figure out wtf to do with this class
class FifoQueue(object):
"""An abstraction class implemented for a simple push/pull queue.

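For context, the module-level code removed above derives StrictRedis connection arguments from the Celery broker URL. A standalone sketch of that parsing, run against a purely hypothetical URL:

broker_url = 'redis://:sekrit@redis:6379/0'  # hypothetical example

redis_kwargs = {}
if not broker_url.lower().startswith('redis://'):
    raise RuntimeError('Cannot use queue with a non-Redis broker: %s' % broker_url)
rest = broker_url[8:]            # strip the 'redis://' prefix
if '@' in rest:
    auth, host = rest.split('@')
    redis_kwargs['password'] = auth.split(':')[1]
else:
    host = rest
host = host.split('/')[0]        # ignore anything after a '/'
if ':' in host:
    host, port = host.split(':')
    redis_kwargs['port'] = int(port)

assert host == 'redis' and redis_kwargs == {'password': 'sekrit', 'port': 6379}
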
@@ -54,14 +20,16 @@ class FifoQueue(object):

def __len__(self):
"""Return the length of the Redis list."""
return redis.llen(self._queue_name)
#return redis.llen(self._queue_name)
return 0

def push(self, value):
"""Push a value onto the right side of the queue."""
redis.rpush(self._queue_name, json.dumps(value))
#redis.rpush(self._queue_name, json.dumps(value))

def pop(self):
"""Retrieve a value from the left side of the queue."""
answer = redis.lpop(self._queue_name)
#answer = redis.lpop(self._queue_name)
answer = None
if answer:
return json.loads(answer)

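With the Redis calls commented out in the hunk above, the queue effectively becomes a no-op. A condensed sketch of the behavior that results (the __init__ shown here is assumed, not part of the diff):

import json

class FifoQueue(object):
    """Condensed sketch: with Redis disabled, push/pop/len do nothing useful."""

    def __init__(self, queue_name):
        self._queue_name = queue_name

    def __len__(self):
        return 0          # was: redis.llen(self._queue_name)

    def push(self, value):
        pass              # was: redis.rpush(self._queue_name, json.dumps(value))

    def pop(self):
        answer = None     # was: redis.lpop(self._queue_name)
        if answer:
            return json.loads(answer)

q = FifoQueue('callback_tasks')   # queue name is only an example
q.push({'event': 'runner_on_ok'})
assert len(q) == 0 and q.pop() is None
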
@@ -891,27 +891,14 @@ class RunJob(BaseTask):
args.extend(['-u', ssh_username])
if 'ssh_password' in kwargs.get('passwords', {}):
args.append('--ask-pass')
try:
if Version(kwargs['ansible_version']) < Version('1.9'):
if become_method and become_method == "sudo" and become_username != "":
args.extend(['-U', become_username])
if become_method and become_method == "sudo" and "become_password" in kwargs.get("passwords", {}):
args.append("--ask-sudo-pass")
if become_method and become_method == "su" and become_username != "":
args.extend(['-R', become_username])
if become_method and become_method == "su" and "become_password" in kwargs.get("passwords", {}):
args.append("--ask-su-pass")
else:
if job.become_enabled:
args.append('--become')
if become_method:
args.extend(['--become-method', become_method])
if become_username:
args.extend(['--become-user', become_username])
if 'become_password' in kwargs.get('passwords', {}):
args.append('--ask-become-pass')
except ValueError:
pass
if job.become_enabled:
args.append('--become')
if become_method:
args.extend(['--become-method', become_method])
if become_username:
args.extend(['--become-user', become_username])
if 'become_password' in kwargs.get('passwords', {}):
args.append('--ask-become-pass')
# Support prompting for a vault password.
if 'vault_password' in kwargs.get('passwords', {}):
args.append('--ask-vault-pass')

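With the pre-1.9 sudo/su handling dropped, both task classes build the privilege-escalation flags the same way. An illustrative helper (not the actual task method) showing the flags that result:

def build_become_args(become_enabled, become_method, become_username, passwords):
    # Mirrors the surviving branch above: always use the --become* family of flags.
    args = []
    if become_enabled:
        args.append('--become')
    if become_method:
        args.extend(['--become-method', become_method])
    if become_username:
        args.extend(['--become-user', become_username])
    if 'become_password' in passwords:
        args.append('--ask-become-pass')
    return args

assert build_become_args(True, 'sudo', 'root', {'become_password': 'ask'}) == [
    '--become', '--become-method', 'sudo', '--become-user', 'root', '--ask-become-pass']
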
@@ -1623,27 +1610,14 @@ class RunAdHocCommand(BaseTask):
args.append('--ask-pass')
# We only specify sudo/su user and password if explicitly given by the
# credential. Credential should never specify both sudo and su.
try:
if Version(kwargs['ansible_version']) < Version('1.9'):
if become_method and become_method == "sudo" and become_username != "":
args.extend(['-U', become_username])
if become_method and become_method == "sudo" and "become_password" in kwargs.get("passwords", {}):
args.append("--ask-sudo-pass")
if become_method and become_method == "su" and become_username != "":
args.extend(['-R', become_username])
if become_method and become_method == "su" and "become_password" in kwargs.get("passwords", {}):
args.append("--ask-su-pass")
else:
if ad_hoc_command.become_enabled:
args.append('--become')
if become_method:
args.extend(['--become-method', become_method])
if become_username:
args.extend(['--become-user', become_username])
if 'become_password' in kwargs.get('passwords', {}):
args.append('--ask-become-pass')
except ValueError:
pass
if ad_hoc_command.become_enabled:
args.append('--become')
if become_method:
args.extend(['--become-method', become_method])
if become_username:
args.extend(['--become-user', become_username])
if 'become_password' in kwargs.get('passwords', {}):
args.append('--ask-become-pass')

if ad_hoc_command.forks: # FIXME: Max limit?
args.append('--forks=%d' % ad_hoc_command.forks)

@@ -40,7 +40,9 @@ class PaginatedDecoratorTests(TestCase):
# Ensure the response looks like what it should.
r = json.loads(response.rendered_content)
self.assertEqual(r['count'], 26)
self.assertEqual(r['next'], '/dummy/?page=2&page_size=5')
self.assertIn(r['next'],
(u'/dummy/?page=2&page_size=5',
u'/dummy/?page_size=5&page=2'))
self.assertEqual(r['previous'], None)
self.assertEqual(r['results'], ['a', 'b', 'c', 'd', 'e'])

@@ -55,8 +57,12 @@ class PaginatedDecoratorTests(TestCase):
# Ensure the response looks like what it should.
r = json.loads(response.rendered_content)
self.assertEqual(r['count'], 26)
self.assertEqual(r['next'], '/dummy/?page=4&page_size=5')
self.assertEqual(r['previous'], '/dummy/?page=2&page_size=5')
self.assertIn(r['next'],
(u'/dummy/?page=4&page_size=5',
u'/dummy/?page_size=5&page=4'))
self.assertIn(r['previous'],
(u'/dummy/?page=2&page_size=5',
u'/dummy/?page_size=5&page=2'))
self.assertEqual(r['results'], ['a', 'b', 'c', 'd', 'e'])

def test_last_page(self):
@@ -71,5 +77,7 @@ class PaginatedDecoratorTests(TestCase):
r = json.loads(response.rendered_content)
self.assertEqual(r['count'], 26)
self.assertEqual(r['next'], None)
self.assertEqual(r['previous'], '/dummy/?page=5&page_size=5')
self.assertIn(r['previous'],
(u'/dummy/?page=5&page_size=5',
u'/dummy/?page_size=5&page=5'))
self.assertEqual(r['results'], ['a', 'b', 'c', 'd', 'e'])

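The switch from assertEqual to assertIn makes these tests tolerant of either ordering of the two query parameters, since such links are typically serialized from a dict whose iteration order is not guaranteed on the Python versions involved. A minimal illustration (the dict literal here is just an example):

try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2, as used by this codebase

expected = (u'/dummy/?page=2&page_size=5',
            u'/dummy/?page_size=5&page=2')

link = '/dummy/?' + urlencode({'page': 2, 'page_size': 5})
assert link in expected                   # either ordering is acceptable
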
@@ -32,6 +32,21 @@ def test_job_survey_password_redaction():
'secret_key': '$encrypted$',
'SSN': '$encrypted$'}

@pytest.mark.survey
def test_survey_passwords_not_in_extra_vars():
"""Tests that survey passwords not included in extra_vars are
not included when displaying job information"""
job = Job(
name="test-survey-not-in",
extra_vars=json.dumps({
'submitter_email': 'foobar@redhat.com'}),
survey_passwords={
'secret_key': '$encrypted$',
'SSN': '$encrypted$'})
assert json.loads(job.display_extra_vars()) == {
'submitter_email': 'foobar@redhat.com',
}

def test_job_safe_args_redacted_passwords(job):
"""Verify that safe_args hides passwords in the job extra_vars"""
kwargs = {'ansible_version': '2.1'}

@@ -1015,6 +1015,11 @@ LOGGING = {
'level': 'WARNING',
'propagate': False,
},
'qpid.messaging': {
'handlers': ['console', 'file', 'tower_warnings'],
'propagate': False,
'level': 'WARNING',
},
'py.warnings': {
'handlers': ['console'],
},

@@ -48,8 +48,23 @@ if is_testing(sys.argv):

MONGO_DB = 'system_tracking_test'

# Django Caching Configuration
if is_testing():
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
else:
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': 'memcached:11211',
},
}

# Celery AMQP configuration.
BROKER_URL = 'redis://redis/'
BROKER_URL = 'qpid://qpid:5672'

# Mongo host configuration
MONGO_HOST = NotImplemented

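A small sketch of what the new CACHES conditional means in practice, assuming these settings are loaded: test runs use an in-process LocMemCache, while the docker-compose environment talks to the memcached:11211 service. The snippet below wires up the test-style backend standalone (the cache key is hypothetical):

import django
from django.conf import settings

# Minimal standalone configuration mirroring the is_testing() branch above.
settings.configure(CACHES={
    'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
})
django.setup()

from django.core.cache import cache

cache.set('job:1234:status', 'running', timeout=30)
assert cache.get('job:1234:status') == 'running'
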
@@ -21,6 +21,7 @@ django-extensions==1.5.9
git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=jsonbfield
django-polymorphic==0.7.2
django-radius==1.0.0
# NOTE: Remove when we transition packaging
django-redis-cache==1.6.5
djangorestframework==3.3.2
djangorestframework-yaml==1.0.2
@@ -96,6 +97,7 @@ python-heatclient==0.8.1
python-ironicclient==1.0.0
python-keystoneclient==2.1.1
python-ldap==2.4.20
python-memcached==1.58
python-neutronclient==4.0.0
python-novaclient==3.2.0
python-openid==2.2.5
@@ -108,10 +110,12 @@ python-troveclient==1.4.0
pytz==2015.7
PyYAML==3.11
pyzmq==14.5.0
qpid-python==0.32.1
rackspace-auth-openstack==1.3
rackspace-novaclient==1.5
rax-default-network-flags-python-novaclient-ext==0.3.2
rax-scheduled-images-python-novaclient-ext==0.3.1
# NOTE: Remove this when we transition packaging
redis==2.10.3
requests-oauthlib==0.5.0
requests==2.9.1

@@ -5,8 +5,9 @@ unittest2
pep8
flake8
pyflakes==1.0.0 # Pinned until PR merges https://gitlab.com/pycqa/flake8/merge_requests/56
pytest
pytest==2.9.2
pytest-cov
pytest-django
pytest-pythonpath
pytest-mock
qpid-tools

@@ -6,7 +6,7 @@ pylint
flake8
distribute==0.7.3
unittest2
pytest
pytest==2.9.2
pytest-cov
pytest-django
pytest-pythonpath

@@ -2,13 +2,14 @@ version: '2'
services:
# Primary Tower Development Container
tower:
build: ./docker-compose
image: gcr.io/ansible-tower-engineering/tower_devel:${TAG}
ports:
- "8080:8080"
- "8013:8013"
links:
- postgres
- redis
- memcached
- qpid
# - sync
# volumes_from:
# - sync
@@ -19,9 +20,12 @@ services:
postgres:
image: postgres:9.4.1

# Redis Container
redis:
image: redis:3.0.1
memcached:
image: memcached:alpine

qpid:
image: fedora/qpid:latest
entrypoint: qpidd --auth=no

# Source Code Synchronization Container
# sync:

@@ -1,31 +1,32 @@
FROM ubuntu:14.04
FROM centos:7

RUN locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
RUN apt-get update && apt-get install -y software-properties-common python-software-properties curl
RUN add-apt-repository -y ppa:chris-lea/redis-server; add-apt-repository -y ppa:chris-lea/zeromq; add-apt-repository -y ppa:chris-lea/node.js; add-apt-repository -y ppa:ansible/ansible; add-apt-repository -y ppa:jal233/proot;
RUN curl -sL https://deb.nodesource.com/setup_6.x | bash -
RUN curl -sL https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | tee /etc/apt/sources.list.d/postgres-9.4.list
RUN apt-get update && apt-get install -y openssh-server ansible mg vim tmux git mercurial subversion python-dev python-psycopg2 make postgresql-client libpq-dev nodejs python-psutil libxml2-dev libxslt-dev lib32z1-dev libsasl2-dev libldap2-dev libffi-dev libzmq-dev proot python-pip libxmlsec1-dev swig redis-server libgss-dev libkrb5-dev && apt-get autoremove --purge -y && rm -rf /var/lib/apt/lists/*
RUN pip install flake8 pytest pytest-pythonpath pytest-django pytest-cov pytest-mock dateutils django-debug-toolbar==1.4 pyflakes==1.0.0 virtualenv
# RUN locale-gen en_US.UTF-8
# ENV LANG en_US.UTF-8
# ENV LANGUAGE en_US:en
# ENV LC_ALL en_US.UTF-8
ADD Makefile /tmp/Makefile
RUN mkdir /tmp/requirements
ADD requirements/requirements.txt requirements/requirements_ansible.txt requirements/requirements_dev.txt requirements/requirements_jenkins.txt /tmp/requirements/
RUN yum -y update && yum -y install curl epel-release
RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash -
RUN yum -y localinstall http://yum.postgresql.org/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-1.noarch.rpm
ADD tools/docker-compose/proot.repo /etc/yum.repos.d/proot.repo
RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel
RUN pip install flake8 pytest==2.9.2 pytest-pythonpath pytest-django pytest-cov pytest-mock dateutils django-debug-toolbar==1.4 pyflakes==1.0.0 virtualenv
RUN /usr/bin/ssh-keygen -q -t rsa -N "" -f /root/.ssh/id_rsa
RUN mkdir -p /etc/tower
RUN mkdir -p /data/db
ADD license /etc/awx/license
ADD license /etc/tower/license
ADD tools/docker-compose/license /etc/tower/license
RUN pip2 install honcho
RUN wget https://github.com/Yelp/dumb-init/releases/download/v1.0.1/dumb-init_1.0.1_amd64.deb
RUN dpkg -i dumb-init_*.deb
RUN rm -rf /dumb-init_*.deb
ADD ansible-tower.egg-link /tmp/ansible-tower.egg-link
ADD tower-manage /usr/local/bin/tower-manage
ADD awx-manage /usr/local/bin/awx-manage
ADD ansible_tower.egg-info /tmp/ansible_tower.egg-info
RUN curl -LO https://github.com/Yelp/dumb-init/releases/download/v1.1.3/dumb-init_1.1.3_amd64 && chmod +x ./dumb-init_1.1.3_amd64 && mv ./dumb-init_1.1.3_amd64 /usr/bin/dumb-init
ADD tools/docker-compose/ansible-tower.egg-link /tmp/ansible-tower.egg-link
ADD tools/docker-compose/tower-manage /usr/local/bin/tower-manage
ADD tools/docker-compose/awx-manage /usr/local/bin/awx-manage
ADD tools/docker-compose/ansible_tower.egg-info /tmp/ansible_tower.egg-info
RUN ln -s /tower_devel/tools/docker-compose/start_development.sh /start_development.sh

WORKDIR /tmp
RUN SWIG_FEATURES="-cpperraswarn -includeall -D__`uname -m`__ -I/usr/include/openssl" VENV_BASE="/venv" make requirements_dev
WORKDIR /
EXPOSE 8013 8080 22
ENTRYPOINT ["/usr/bin/dumb-init"]
CMD /start_development.sh

@@ -1,4 +1,4 @@
#!/tower_devel/venv/tower/bin/python
#!/venv/tower/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'ansible-tower==3.0.0-0.devel','console_scripts','awx-manage'
__requires__ = 'ansible-tower==3.0.0-0.devel'
import sys

tools/docker-compose/proot.repo (new file, 8 lines)
@@ -0,0 +1,8 @@
[jlaska-proot]
name=Copr repo for proot owned by jlaska
baseurl=https://copr-be.cloud.fedoraproject.org/results/jlaska/proot/epel-7-$basearch/
skip_if_unavailable=True
gpgcheck=1
gpgkey=https://copr-be.cloud.fedoraproject.org/results/jlaska/proot/pubkey.gpg
enabled=1
enabled_metadata=1

@@ -3,10 +3,13 @@ set +x

# Wait for the databases to come up
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=redis port=6379" all
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=memcached port=11211" all
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=qpid port=5672" all

# In case Tower in the container wants to connect to itself, use "docker exec" to attach to the container otherwise
/etc/init.d/ssh start
# TODO: FIX
#/etc/init.d/ssh start

ansible -i "127.0.0.1," -c local -v -m postgresql_user -U postgres -a "name=awx-dev password=AWXsome1 login_user=postgres login_host=postgres" all
ansible -i "127.0.0.1," -c local -v -m postgresql_db -U postgres -a "name=awx-dev owner=awx-dev login_user=postgres login_host=postgres" all

@@ -25,11 +28,11 @@ cp -nR /tmp/ansible_tower.egg-info /tower_devel/ || true
#if [ -f "awx/lib/.deps_built" ]; then
# echo "Skipping dependency build - remove awx/lib/.deps_built to force a rebuild"
#else
make requirements_dev
#SWIG_FEATURES="-cpperraswarn -includeall -D__`uname -m`__ -I/usr/include/openssl" make requirements_dev
# touch awx/lib/.deps_built
#fi

cp /tmp/ansible-tower.egg-link /tower_devel/venv/tower/lib/python2.7/site-packages/ansible-tower.egg-link
cp /tmp/ansible-tower.egg-link /venv/tower/lib/python2.7/site-packages/ansible-tower.egg-link

# Tower bootstrapping
make version_file
