From 097fba690bd78c3ca4a0edcce7a814b980ad9a9c Mon Sep 17 00:00:00 2001 From: Leigh Johnson Date: Fri, 1 Apr 2016 10:35:07 -0400 Subject: [PATCH 1/9] update docker-compose build, resolves #1377 --- tools/docker-compose.yml | 12 ------------ tools/docker-compose/Dockerfile | 2 +- tools/docker-compose/start_development.sh | 1 - tools/docker/start_development.sh | 1 - tools/docker/start_everything.sh | 1 - 5 files changed, 1 insertion(+), 16 deletions(-) diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 5cfd679b8a..4b506c4f36 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -6,7 +6,6 @@ tower: links: - postgres - redis - - mongo - graphite volumes: - ../:/tower_devel @@ -18,17 +17,6 @@ redis: image: redis:3.0.1 # ports: # - 6379:6379 -mongo: - image: mongo:3.0 - # ports: - # - 27017:27017 -dockerui: - image: dockerui/dockerui - ports: - - "9000:9000" - privileged: true - volumes: - - /var/run/docker.sock:/var/run/docker.sock graphite: image: hopsoft/graphite-statsd ports: diff --git a/tools/docker-compose/Dockerfile b/tools/docker-compose/Dockerfile index 72568d3dfc..8c05abf424 100644 --- a/tools/docker-compose/Dockerfile +++ b/tools/docker-compose/Dockerfile @@ -8,7 +8,7 @@ RUN apt-get update && apt-get install -y software-properties-common python-softw RUN add-apt-repository -y ppa:chris-lea/redis-server; add-apt-repository -y ppa:chris-lea/zeromq; add-apt-repository -y ppa:chris-lea/node.js; add-apt-repository -y ppa:ansible/ansible; add-apt-repository -y ppa:jal233/proot; RUN curl -sL https://deb.nodesource.com/setup_0.12 | bash - RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 && apt-key adv --fetch-keys http://www.postgresql.org/media/keys/ACCC4CF8.asc -RUN echo "deb http://repo.mongodb.org/apt/ubuntu "$(lsb_release -sc)"/mongodb-org/3.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-3.0.list && echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | tee 
/etc/apt/sources.list.d/postgres-9.4.list +RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | tee /etc/apt/sources.list.d/postgres-9.4.list RUN apt-get update RUN apt-get install -y openssh-server ansible mg vim tmux git mercurial subversion python-dev python-psycopg2 make postgresql-client libpq-dev nodejs python-psutil libxml2-dev libxslt-dev lib32z1-dev libsasl2-dev libldap2-dev libffi-dev libzmq-dev proot python-pip libxmlsec1-dev swig redis-server && rm -rf /var/lib/apt/lists/* RUN pip install flake8 diff --git a/tools/docker-compose/start_development.sh b/tools/docker-compose/start_development.sh index 4368f41779..c1711624d7 100755 --- a/tools/docker-compose/start_development.sh +++ b/tools/docker-compose/start_development.sh @@ -4,7 +4,6 @@ set +x # Wait for the databases to come up ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=redis port=6379" all -ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=mongo port=27017" all # In case Tower in the container wants to connect to itself, use "docker exec" to attach to the container otherwise /etc/init.d/ssh start diff --git a/tools/docker/start_development.sh b/tools/docker/start_development.sh index c500ea0cde..96b4d49d9e 100755 --- a/tools/docker/start_development.sh +++ b/tools/docker/start_development.sh @@ -3,7 +3,6 @@ /etc/init.d/ssh start /etc/init.d/postgresql start /etc/init.d/redis-server start -nohup mongod & if ! 
[ -d "/tower_devel/awx/lib/site-packages" ]; then ln -s /tower/awx/lib/site-packages /tower_devel/awx/lib/site-packages fi diff --git a/tools/docker/start_everything.sh b/tools/docker/start_everything.sh index 9c901a982c..4002a300a8 100755 --- a/tools/docker/start_everything.sh +++ b/tools/docker/start_everything.sh @@ -2,7 +2,6 @@ /etc/init.d/postgresql start /etc/init.d/redis-server start -nohup mongod --smallfiles & (cd /tower_devel && make server && /bin/bash) From 25f6200dd56dee8d48c774b2cc585e5f0dde61f2 Mon Sep 17 00:00:00 2001 From: Leigh Johnson Date: Fri, 1 Apr 2016 10:50:51 -0400 Subject: [PATCH 2/9] remove old docker build instructions --- tools/docker/Dockerfile | 36 --- tools/docker/README | 2 - tools/docker/license | 12 - tools/docker/local_settings.py | 489 ------------------------------ tools/docker/start_development.sh | 9 - tools/docker/start_everything.sh | 7 - tools/docker/start_local.sh | 3 - tools/docker/start_superlocal.sh | 3 - 8 files changed, 561 deletions(-) delete mode 100644 tools/docker/Dockerfile delete mode 100644 tools/docker/README delete mode 100644 tools/docker/license delete mode 100644 tools/docker/local_settings.py delete mode 100755 tools/docker/start_development.sh delete mode 100755 tools/docker/start_everything.sh delete mode 100755 tools/docker/start_local.sh delete mode 100755 tools/docker/start_superlocal.sh diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile deleted file mode 100644 index 42f9865722..0000000000 --- a/tools/docker/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -FROM ubuntu:14.04 - -RUN locale-gen en_US.UTF-8 -ENV LANG en_US.UTF-8 -ENV LANGUAGE en_US:en -ENV LC_ALL en_US.UTF-8 -RUN apt-get update -RUN apt-get install -y software-properties-common python-software-properties curl -RUN add-apt-repository -y ppa:chris-lea/redis-server; add-apt-repository -y ppa:chris-lea/zeromq; add-apt-repository -y ppa:chris-lea/node.js; add-apt-repository ppa:ansible/ansible -RUN curl -sL 
https://deb.nodesource.com/setup_0.12 | bash - -RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 && apt-key adv --fetch-keys http://www.postgresql.org/media/keys/ACCC4CF8.asc -RUN echo "deb http://repo.mongodb.org/apt/ubuntu "$(lsb_release -sc)"/mongodb-org/3.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-3.0.list && echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | tee /etc/apt/sources.list.d/postgres-9.4.list -RUN apt-get update -RUN apt-get install -y openssh-server ansible mg vim tmux git mercurial subversion python-dev python-psycopg2 make postgresql nodejs redis-server mongodb-org python-psutil libxml2-dev libxslt-dev lib32z1-dev libsasl2-dev libldap2-dev libffi-dev libzmq-dev postgresql-server-dev-9.4 proot python-pip && rm -rf /var/lib/apt/lists/* -RUN git clone http://d7f73f8614295715a4777bf15f46d0d486ad0e21:x-oauth-basic@github.com/ansible/ansible-commander.git tower -RUN cd /tower ; make develop -RUN cd /tower ; make requirements_dev -RUN cd /tower ; make version_file -RUN /etc/init.d/postgresql start ; su - postgres -c 'ansible -i "127.0.0.1," -c local -v -m postgresql_user -U postgres -a "name=awx-dev password=AWXsome1 login_user=postgres" all'; /etc/init.d/postgresql stop -RUN /etc/init.d/postgresql start ; su - postgres -c 'ansible -i "127.0.0.1," -c local -v -m postgresql_db -U postgres -a "name=awx-dev owner=awx-dev login_user=postgres" all'; /etc/init.d/postgresql stop -RUN /usr/bin/ssh-keygen -q -t rsa -N "" -f /root/.ssh/id_rsa -ADD local_settings.py /tower/awx/settings/local_settings.py -RUN mkdir -p /etc/tower -RUN mkdir -p /data/db -ADD license /etc/awx/license -ADD license /etc/tower/license -RUN pip2 install honcho -RUN /etc/init.d/postgresql start; cd /tower ; make migrate ; make init ; /etc/init.d/postgresql stop -RUN wget https://github.com/Yelp/dumb-init/releases/download/v1.0.0/dumb-init_1.0.0_amd64.deb -RUN dpkg -i dumb-init_*.deb -ADD start_everything.sh /start_everything.sh -ADD 
start_development.sh /start_development.sh - -EXPOSE 8013 8080 27017 22 -ENTRYPOINT ["/usr/bin/dumb-init"] -CMD /start_development.sh diff --git a/tools/docker/README b/tools/docker/README deleted file mode 100644 index 1d1829ec11..0000000000 --- a/tools/docker/README +++ /dev/null @@ -1,2 +0,0 @@ -docker build --no-cache=true --rm=true -t ansible/tower_devel:latest . -docker run --name tower_test -it --memory="4g" --cpuset="0,1" -v /Users/meyers/ansible/:/tower_devel -p 8013:8013 -p 8080:8080 -p 27017:27017 -p 2222:22 ansible/tower_devel diff --git a/tools/docker/license b/tools/docker/license deleted file mode 100644 index 339622237f..0000000000 --- a/tools/docker/license +++ /dev/null @@ -1,12 +0,0 @@ -{ - "subscription_name": "Enterprise Tower up to 100000 Nodes", - "features": {}, - "instance_count": 100000, - "trial": false, - "contact_email": "gross@weiner.com", - "company_name": "Dr. James Grossweiner", - "license_type": "enterprise", - "contact_name": "Dr. James Grossweiner", - "license_date": 2524608000, - "license_key": "cf2da1fdbc196c3f9a354d871cdad572dcd2e309a1e49616d8d9c600a9b7fc91" -} \ No newline at end of file diff --git a/tools/docker/local_settings.py b/tools/docker/local_settings.py deleted file mode 100644 index 44e2ae79dc..0000000000 --- a/tools/docker/local_settings.py +++ /dev/null @@ -1,489 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved. - -# Local Django settings for AWX project. Rename to "local_settings.py" and -# edit as needed for your development environment. - -# All variables defined in awx/settings/development.py will already be loaded -# into the global namespace before this file is loaded, to allow for reading -# and updating the default settings as needed. 
- -############################################################################### -# MISC PROJECT SETTINGS -############################################################################### - -ADMINS = ( - # ('Your Name', 'your_email@domain.com'), -) - -MANAGERS = ADMINS - -# Database settings to use PostgreSQL for development. -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'awx-dev', - 'USER': 'awx-dev', - 'PASSWORD': 'AWXsome1', - 'HOST': 'localhost', - 'PORT': '', - } -} - -# Use SQLite for unit tests instead of PostgreSQL. If the lines below are -# commented out, Django will create the test_awx-dev database in PostgreSQL to -# run unit tests. -if len(sys.argv) >= 2 and sys.argv[1] == 'test': - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'), - # Test database cannot be :memory: for celery/inventory tests. - 'TEST_NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'), - } - } - -# Celery AMQP configuration. -BROKER_URL = 'redis://localhost/' - -# Set True to enable additional logging from the job_event_callback plugin -JOB_CALLBACK_DEBUG = False - -# Absolute filesystem path to the directory to host projects (with playbooks). -# This directory should NOT be web-accessible. -PROJECTS_ROOT = os.path.join(BASE_DIR, 'projects') - -# Absolute filesystem path to the directory for job status stdout -# This directory should not be web-accessible -JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_status') - -# The UUID of the system, for HA. -SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' - -# Local time zone for this installation. Choices can be found here: -# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name -# although not all choices may be available on all operating systems. -# On Unix systems, a value of None will cause Django to use the same -# timezone as the operating system. 
-# If running in a Windows environment this must be set to the same as your -# system time zone. -TIME_ZONE = 'America/New_York' - -# Language code for this installation. All choices can be found here: -# http://www.i18nguy.com/unicode/language-identifiers.html -LANGUAGE_CODE = 'en-us' - -# SECURITY WARNING: keep the secret key used in production secret! -# Hardcoded values can leak through source control. Consider loading -# the secret key from an environment variable or a file instead. -SECRET_KEY = 'p7z7g1ql4%6+(6nlebb6hdk7sd^&fnjpal308%n%+p^_e6vo1y' - -# HTTP headers and meta keys to search to determine remote host name or IP. Add -# additional items to this list, such as "HTTP_X_FORWARDED_FOR", if behind a -# reverse proxy. -REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST'] - -# Define additional environment variables to be passed to subprocess started by -# the celery task. -#AWX_TASK_ENV['FOO'] = 'BAR' - -# If set, use -vvv for project updates instead of -v for more output. -# PROJECT_UPDATE_VVV=True - -# Set verbosity for inventory import command when running inventory updates. -# INVENTORY_UPDATE_VERBOSITY=1 - -############################################################################### -# EMAIL SETTINGS -############################################################################### - -# Email address that error messages come from. -SERVER_EMAIL = 'root@localhost' - -# The email backend to use. For possible shortcuts see django.core.mail. -# The default is to use the SMTP backend. -# Third-party backends can be specified by providing a Python path -# to a module that defines an EmailBackend class. -EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' - -# Host for sending email. -EMAIL_HOST = 'localhost' - -# Port for sending email. -EMAIL_PORT = 25 - -# Optional SMTP authentication information for EMAIL_HOST. 
-EMAIL_HOST_USER = '' -EMAIL_HOST_PASSWORD = '' -EMAIL_USE_TLS = False - -# Default email address to use for various automated correspondence from -# the site managers. -DEFAULT_FROM_EMAIL = 'webmaster@localhost' - -# Subject-line prefix for email messages send with django.core.mail.mail_admins -# or ...mail_managers. Make sure to include the trailing space. -EMAIL_SUBJECT_PREFIX = '[AWX] ' - -############################################################################### -# LOGGING SETTINGS -############################################################################### - -# Enable logging to syslog. Setting level to ERROR captures 500 errors, -# WARNING also logs 4xx responses. -LOGGING['handlers']['syslog'] = { - 'level': 'WARNING', - 'filters': [], - 'class': 'django.utils.log.NullHandler', - 'formatter': 'simple', -} - -# Enable the following lines to also log to a file. -#LOGGING['handlers']['file'] = { -# 'class': 'logging.FileHandler', -# 'filename': os.path.join(BASE_DIR, 'awx.log'), -# 'formatter': 'simple', -#} - -# Enable the following lines to turn on lots of permissions-related logging. -#LOGGING['loggers']['awx.main.access']['propagate'] = True -#LOGGING['loggers']['awx.main.signals']['propagate'] = True -#LOGGING['loggers']['awx.main.permissions']['propagate'] = True - -# Enable the following lines to turn on LDAP auth logging. -#LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console'] -#LOGGING['loggers']['django_auth_ldap']['level'] = 'DEBUG' - -############################################################################### -# LDAP AUTHENTICATION SETTINGS -############################################################################### - -# Refer to django-auth-ldap docs for more details: -# http://pythonhosted.org/django-auth-ldap/authentication.html - -# LDAP server URI, such as "ldap://ldap.example.com:389" (non-SSL) or -# "ldaps://ldap.example.com:636" (SSL). LDAP authentication is disable if this -# parameter is empty. 
-AUTH_LDAP_SERVER_URI = '' - -# DN of user to bind for all search queries. Normally in the format -# "CN=Some User,OU=Users,DC=example,DC=com" but may also be specified as -# "DOMAIN\username" for Active Directory. -AUTH_LDAP_BIND_DN = '' - -# Password using to bind above user account. -AUTH_LDAP_BIND_PASSWORD = '' - -# Enable TLS when the connection is not using SSL. -AUTH_LDAP_START_TLS = False - -# Imports needed for remaining LDAP configuration. -import ldap -from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion -from django_auth_ldap.config import ActiveDirectoryGroupType - -# LDAP search query to find users. -AUTH_LDAP_USER_SEARCH = LDAPSearch( - 'OU=Users,DC=example,DC=com', # Base DN - ldap.SCOPE_SUBTREE, # SCOPE_BASE, SCOPE_ONELEVEL, SCOPE_SUBTREE - '(sAMAccountName=%(user)s)', # Query -) - -# Alternative to user search, if user DNs are all of the same format. -#AUTH_LDAP_USER_DN_TEMPLATE = 'uid=%(user)s,OU=Users,DC=example,DC=com' - -# Mapping of LDAP to user atrributes (key is user attribute name, value is LDAP -# attribute name). -AUTH_LDAP_USER_ATTR_MAP = { - 'first_name': 'givenName', - 'last_name': 'sn', - 'email': 'mail', -} - -# LDAP search query to find groups. Does not support LDAPSearchUnion. -AUTH_LDAP_GROUP_SEARCH = LDAPSearch( - 'DC=example,DC=com', # Base DN - ldap.SCOPE_SUBTREE, # SCOPE_BASE, SCOPE_ONELEVEL, SCOPE_SUBTREE - '(objectClass=group)', # Query -) -# Type of group returned by the search above. Should be one of the types -# listed at: http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups -AUTH_LDAP_GROUP_TYPE = ActiveDirectoryGroupType() - -# Group DN required to login. If specified, user must be a member of this -# group to login via LDAP. -#AUTH_LDAP_REQUIRE_GROUP = '' - -# Group DN denied from login. If specified, user will not be allowed to login -# if a member of this group. 
-#AUTH_LDAP_DENY_GROUP = '' - -# User profile flags updated from group membership (key is user attribute name, -# value is group DN). -AUTH_LDAP_USER_FLAGS_BY_GROUP = { - #'is_superuser': 'CN=Domain Admins,CN=Users,DC=example,DC=com', -} - -# Mapping between organization admins/users and LDAP groups. Keys are -# organization names (will be created if not present). Values are dictionaries -# of options for each organization's membership, where each can contain the -# following parameters: -# - remove: True/False. Defaults to False. Specifies the default for -# remove_admins or remove_users if those parameters aren't explicitly set. -# - admins: None, True/False, string or list/tuple of strings. -# If None, organization admins will not be updated. -# If True/False, all LDAP users will be added/removed as admins. -# If a string or list of strings, specifies the group DN(s). User will be -# added as an org admin if the user is a member of ANY of these groups. -# - remove_admins: True/False. Defaults to False. If True, a user who is not a -# member of the given groups will be removed from the organization's admins. -# - users: None, True/False, string or list/tuple of strings. Same rules apply -# as for admins. -# - remove_users: True/False. Defaults to False. If True, a user who is not a -# member of the given groups will be removed from the organization's users. -AUTH_LDAP_ORGANIZATION_MAP = { - #'Test Org': { - # 'admins': 'CN=Domain Admins,CN=Users,DC=example,DC=com', - # 'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'], - #}, - #'Test Org 2': { - # 'admins': ['CN=Administrators,CN=Builtin,DC=example,DC=com'], - # 'users': True, - #}, -} - -# Mapping between team members (users) and LDAP groups. Keys are team names -# (will be created if not present). Values are dictionaries of options for -# each team's membership, where each can contain the following parameters: -# - organization: string. The name of the organization to which the team -# belongs. 
The team will be created if the combination of organization and -# team name does not exist. The organization will first be created if it -# does not exist. -# - users: None, True/False, string or list/tuple of strings. -# If None, team members will not be updated. -# If True/False, all LDAP users will be added/removed as team members. -# If a string or list of strings, specifies the group DN(s). User will be -# added as a team member if the user is a member of ANY of these groups. -# - remove: True/False. Defaults to False. If True, a user who is not a member -# of the given groups will be removed from the team. -AUTH_LDAP_TEAM_MAP = { - 'My Team': { - 'organization': 'Test Org', - 'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'], - 'remove': True, - }, - 'Other Team': { - 'organization': 'Test Org 2', - 'users': 'CN=Other Users,CN=Users,DC=example,DC=com', - 'remove': False, - }, -} - -############################################################################### -# SCM TEST SETTINGS -############################################################################### - -# Define these variables to enable more complete testing of project support for -# SCM updates. The test repositories listed do not have to contain any valid -# playbooks. 
- -try: - path = os.path.expanduser(os.path.expandvars('~/.ssh/id_rsa')) - TEST_SSH_KEY_DATA = file(path, 'rb').read() -except IOError: - TEST_SSH_KEY_DATA = '' - -TEST_GIT_USERNAME = '' -TEST_GIT_PASSWORD = '' -TEST_GIT_KEY_DATA = TEST_SSH_KEY_DATA -TEST_GIT_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com.git' -TEST_GIT_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs.git' -TEST_GIT_PRIVATE_SSH = 'git@github.com:ansible/product-docs.git' - -TEST_HG_USERNAME = '' -TEST_HG_PASSWORD = '' -TEST_HG_KEY_DATA = TEST_SSH_KEY_DATA -TEST_HG_PUBLIC_HTTPS = 'https://bitbucket.org/cchurch/django-hotrunner' -TEST_HG_PRIVATE_HTTPS = '' -TEST_HG_PRIVATE_SSH = '' - -TEST_SVN_USERNAME = '' -TEST_SVN_PASSWORD = '' -TEST_SVN_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com' -TEST_SVN_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs' - -# To test repo access via SSH login to localhost. -import getpass -TEST_SSH_LOOPBACK_USERNAME = getpass.getuser() -TEST_SSH_LOOPBACK_PASSWORD = '' - -############################################################################### -# LDAP TEST SETTINGS -############################################################################### - -# LDAP connection and authentication settings for unit tests only. LDAP tests -# will be skipped if TEST_AUTH_LDAP_SERVER_URI is not configured. - -TEST_AUTH_LDAP_SERVER_URI = '' -TEST_AUTH_LDAP_BIND_DN = '' -TEST_AUTH_LDAP_BIND_PASSWORD = '' -TEST_AUTH_LDAP_START_TLS = False - -# LDAP username/password for testing authentication. -TEST_AUTH_LDAP_USERNAME = '' -TEST_AUTH_LDAP_PASSWORD = '' - -# LDAP search query to find users. -TEST_AUTH_LDAP_USER_SEARCH = LDAPSearch( - 'CN=Users,DC=example,DC=com', - ldap.SCOPE_SUBTREE, - '(sAMAccountName=%(user)s)', -) - -# Alternative to user search. -#TEST_AUTH_LDAP_USER_DN_TEMPLATE = 'sAMAccountName=%(user)s,OU=Users,DC=example,DC=com' - -# Mapping of LDAP attributes to user attributes. 
-TEST_AUTH_LDAP_USER_ATTR_MAP = { - 'first_name': 'givenName', - 'last_name': 'sn', - 'email': 'mail', -} - -# LDAP search query for finding groups. -TEST_AUTH_LDAP_GROUP_SEARCH = LDAPSearch( - 'DC=example,DC=com', - ldap.SCOPE_SUBTREE, - '(objectClass=group)', -) -# Type of group returned by the search above. -TEST_AUTH_LDAP_GROUP_TYPE = ActiveDirectoryGroupType() - -# Test DNs for a group required to login. User should be a member of the first -# group, but not a member of the second. -TEST_AUTH_LDAP_REQUIRE_GROUP = 'CN=Domain Admins,CN=Users,DC=example,DC=com' -TEST_AUTH_LDAP_REQUIRE_GROUP_FAIL = 'CN=Guest,CN=Users,DC=example,DC=com' - -# Test DNs for a group denied from login. User should not be a member of the -# first group, but should be a member of the second. -TEST_AUTH_LDAP_DENY_GROUP = 'CN=Guest,CN=Users,DC=example,DC=com' -TEST_AUTH_LDAP_DENY_GROUP_FAIL = 'CN=Domain Admins,CN=Users,DC=example,DC=com' - -# User profile flags updated from group membership. Test user should be a -# member of the group. -TEST_AUTH_LDAP_USER_FLAGS_BY_GROUP = { - 'is_superuser': 'CN=Domain Admins,CN=Users,DC=example,DC=com', -} - -# Test mapping between organization admins/users and LDAP groups. -TEST_AUTH_LDAP_ORGANIZATION_MAP = { - 'Test Org': { - 'admins': 'CN=Domain Admins,CN=Users,DC=example,DC=com', - 'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'], - }, - 'Test Org 2': { - 'admins': ['CN=Administrators,CN=Builtin,DC=example,DC=com'], - 'users': True, - }, -} -# Expected results from organization mapping. After login, should user be an -# admin/user in the given organization? -TEST_AUTH_LDAP_ORGANIZATION_MAP_RESULT = { - 'Test Org': {'admins': True, 'users': False}, - 'Test Org 2': {'admins': False, 'users': True}, -} - -# Second test mapping to test remove parameters. 
-TEST_AUTH_LDAP_ORGANIZATION_MAP_2 = { - 'Test Org': { - 'admins': 'CN=Domain Users,CN=Users,DC=example,DC=com', - 'users': True, - 'remove_admins': True, - 'remove_users': False, - }, - 'Test Org 2': { - 'admins': ['CN=Domain Admins,CN=Users,DC=example,DC=com', - 'CN=Administrators,CN=Builtin,DC=example,DC=com'], - 'users': False, - 'remove': True, - }, -} - -# Expected results from second organization mapping. -TEST_AUTH_LDAP_ORGANIZATION_MAP_2_RESULT = { - 'Test Org': {'admins': False, 'users': True}, - 'Test Org 2': {'admins': True, 'users': False}, -} - -# Test mapping between team users and LDAP groups. -TEST_AUTH_LDAP_TEAM_MAP = { - 'Domain Users Team': { - 'organization': 'Test Org', - 'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'], - 'remove': False, - }, - 'Admins Team': { - 'organization': 'Admins Org', - 'users': 'CN=Domain Admins,CN=Users,DC=example,DC=com', - 'remove': True, - }, - 'Everyone Team': { - 'organization': 'Test Org 2', - 'users': True, - }, -} -# Expected results from team mapping. After login, should user be a member of -# the given team? -TEST_AUTH_LDAP_TEAM_MAP_RESULT = { - 'Domain Users Team': {'users': False}, - 'Admins Team': {'users': True}, - 'Everyone Team': {'users': True}, -} - -# Second test mapping for teams to remove user. -TEST_AUTH_LDAP_TEAM_MAP_2 = { - 'Domain Users Team': { - 'organization': 'Test Org', - 'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'], - 'remove': False, - }, - 'Admins Team': { - 'organization': 'Admins Org', - 'users': 'CN=Administrators,CN=Builtin,DC=example,DC=com', - 'remove': True, - }, - 'Everyone Team': { - 'organization': 'Test Org 2', - 'users': False, - 'remove': False, - }, -} -# Expected results from second team mapping. After login, should user be a -# member of the given team? 
-TEST_AUTH_LDAP_TEAM_MAP_2_RESULT = { - 'Domain Users Team': {'users': False}, - 'Admins Team': {'users': False}, - 'Everyone Team': {'users': True}, -} - -############################################################################### -# INVENTORY IMPORT TEST SETTINGS -############################################################################### - -# Define these variables to enable more complete testing of inventory import -# from cloud providers. - -# EC2 credentials -TEST_AWS_ACCESS_KEY_ID = '' -TEST_AWS_SECRET_ACCESS_KEY = '' -TEST_AWS_REGIONS = 'all' - -# Rackspace credentials -TEST_RACKSPACE_USERNAME = '' -TEST_RACKSPACE_API_KEY = '' -TEST_RACKSPACE_REGIONS = 'all' - -# VMware credentials -TEST_VMWARE_HOST = '' -TEST_VMWARE_USER = '' -TEST_VMWARE_PASSWORD = '' diff --git a/tools/docker/start_development.sh b/tools/docker/start_development.sh deleted file mode 100755 index 96b4d49d9e..0000000000 --- a/tools/docker/start_development.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -/etc/init.d/ssh start -/etc/init.d/postgresql start -/etc/init.d/redis-server start -if ! 
[ -d "/tower_devel/awx/lib/site-packages" ]; then - ln -s /tower/awx/lib/site-packages /tower_devel/awx/lib/site-packages -fi -/bin/bash diff --git a/tools/docker/start_everything.sh b/tools/docker/start_everything.sh deleted file mode 100755 index 4002a300a8..0000000000 --- a/tools/docker/start_everything.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -/etc/init.d/postgresql start -/etc/init.d/redis-server start - -(cd /tower_devel && make server && -/bin/bash) diff --git a/tools/docker/start_local.sh b/tools/docker/start_local.sh deleted file mode 100755 index b769f950af..0000000000 --- a/tools/docker/start_local.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -/etc/init.d/postgresql start diff --git a/tools/docker/start_superlocal.sh b/tools/docker/start_superlocal.sh deleted file mode 100755 index 6d303b0693..0000000000 --- a/tools/docker/start_superlocal.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -/etc/init.d/postgresql start ; /etc/init.d/rabbitmq-server start; cd /tower_devel; make server From a01b2913bbf09f6890ac9a9294f6d339179a3620 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 28 Mar 2016 15:30:38 -0400 Subject: [PATCH 3/9] mockity mock mock --- awx/api/serializers.py | 19 ++- awx/main/tests/functional/conftest.py | 12 +- awx/main/tests/unit/api/test_generics.py | 80 ++++++++++ awx/main/tests/unit/api/test_serializers.py | 156 ++++++++++++++++++++ awx/main/tests/unit/api/test_views.py | 52 +++++++ 5 files changed, 307 insertions(+), 12 deletions(-) create mode 100644 awx/main/tests/unit/api/test_generics.py create mode 100644 awx/main/tests/unit/api/test_serializers.py create mode 100644 awx/main/tests/unit/api/test_views.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 0cca493965..8846d11860 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -1551,6 +1551,14 @@ class JobOptionsSerializer(BaseSerializer): args=(obj.cloud_credential.pk,)) return res + def _summary_field_labels(self, obj): + return [{'id': 
x.id, 'name': x.name} for x in obj.labels.all().order_by('-name')[:10]] + + def get_summary_fields(self, obj): + res = super(JobOptionsSerializer, self).get_summary_fields(obj) + res['labels'] = self._summary_field_labels(obj) + return res + def to_representation(self, obj): ret = super(JobOptionsSerializer, self).to_representation(obj) if obj is None: @@ -1607,6 +1615,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): res['callback'] = reverse('api:job_template_callback', args=(obj.pk,)) return res + def _recent_jobs(self, obj): + return [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.filter(active=True).order_by('-created')[:10]] + def get_summary_fields(self, obj): d = super(JobTemplateSerializer, self).get_summary_fields(obj) if obj.survey_spec is not None and ('name' in obj.survey_spec and 'description' in obj.survey_spec): @@ -1625,8 +1636,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): else: d['can_copy'] = False d['can_edit'] = False - d['recent_jobs'] = [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.filter(active=True).order_by('-created')[:10]] - d['labels'] = [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('-name')[:10]] + d['recent_jobs'] = self._recent_jobs(obj) return d def validate(self, attrs): @@ -1668,11 +1678,6 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): res['relaunch'] = reverse('api:job_relaunch', args=(obj.pk,)) return res - def get_summary_fields(self, obj): - d = super(JobSerializer, self).get_summary_fields(obj) - d['labels'] = [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('-name')[:10]] - return d - def to_internal_value(self, data): # When creating a new job and a job template is specified, populate any # fields not provided in data from the job template. 
diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 98b2c92781..64740dd7d5 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -265,14 +265,16 @@ def team(organization): def permission_inv_read(organization, inventory, team): return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ) - @pytest.fixture -def job_template_labels(organization): +def job_template(organization): jt = JobTemplate(name='test-job_template') jt.save() - jt.labels.create(name="label-1", organization=organization) - jt.labels.create(name="label-2", organization=organization) - return jt +@pytest.fixture +def job_template_labels(organization, job_template): + job_template.labels.create(name="label-1", organization=organization) + job_template.labels.create(name="label-2", organization=organization) + + return job_template diff --git a/awx/main/tests/unit/api/test_generics.py b/awx/main/tests/unit/api/test_generics.py new file mode 100644 index 0000000000..42fe141c49 --- /dev/null +++ b/awx/main/tests/unit/api/test_generics.py @@ -0,0 +1,80 @@ + +# Python +import pytest + +# DRF +from rest_framework import status +from rest_framework.response import Response + +# AWX +from awx.api.generics import ParentMixin, SubListCreateAttachDetachAPIView + +@pytest.fixture +def get_object_or_404(mocker): + # pytest patch without return_value generates a random value, we are counting on this + return mocker.patch('awx.api.generics.get_object_or_404') + +@pytest.fixture +def get_object_or_400(mocker): + return mocker.patch('awx.api.generics.get_object_or_400') + +@pytest.fixture +def mock_response_new(mocker): + m = mocker.patch('awx.api.generics.Response.__new__') + m.return_value = m + return m + +@pytest.fixture +def parent_relationship_factory(mocker): + def rf(serializer_class, relationship_name, relationship_value=mocker.Mock()): + mock_parent_relationship = 
mocker.MagicMock(**{'%s.add.return_value' % relationship_name: relationship_value}) + mocker.patch('awx.api.generics.ParentMixin.get_parent_object', return_value=mock_parent_relationship) + + serializer = serializer_class() + [setattr(serializer, x, '') for x in ['relationship', 'model', 'parent_model']] + serializer.relationship = relationship_name + + return (serializer, mock_parent_relationship) + return rf + +# TODO: Test create and associate failure (i.e. id doesn't exist or record already exists) +# TODO: Mock and check return (Response) +class TestSubListCreateAttachDetachAPIView: + def test_attach_create_and_associate(self, mocker, get_object_or_400, parent_relationship_factory, mock_response_new): + (serializer, mock_parent_relationship) = parent_relationship_factory(SubListCreateAttachDetachAPIView, 'wife') + create_return_value = mocker.MagicMock(status_code=status.HTTP_201_CREATED) + serializer.create = mocker.Mock(return_value=create_return_value) + + mock_request = mocker.MagicMock(data=dict()) + ret = serializer.attach(mock_request, None, None) + + assert ret == mock_response_new + serializer.create.assert_called_with(mock_request, None, None) + mock_parent_relationship.wife.add.assert_called_with(get_object_or_400.return_value) + mock_response_new.assert_called_with(Response, create_return_value.data, status=status.HTTP_201_CREATED, headers={'Location': create_return_value['Location']}) + + def test_attach_associate_only(self, mocker, get_object_or_400, parent_relationship_factory, mock_response_new): + (serializer, mock_parent_relationship) = parent_relationship_factory(SubListCreateAttachDetachAPIView, 'wife') + serializer.create = mocker.Mock(return_value=mocker.MagicMock()) + + mock_request = mocker.MagicMock(data=dict(id=1)) + ret = serializer.attach(mock_request, None, None) + + assert ret == mock_response_new + serializer.create.assert_not_called() + mock_parent_relationship.wife.add.assert_called_with(get_object_or_400.return_value) + 
mock_response_new.assert_called_with(Response, status=status.HTTP_204_NO_CONTENT) + +class TestParentMixin: + def test_get_parent_object(self, mocker, get_object_or_404): + parent_mixin = ParentMixin() + parent_mixin.lookup_field = 'foo' + parent_mixin.kwargs = dict(foo='bar') + parent_mixin.parent_model = 'parent_model' + mock_parent_mixin = mocker.MagicMock(wraps=parent_mixin) + + return_value = mock_parent_mixin.get_parent_object() + + get_object_or_404.assert_called_with(parent_mixin.parent_model, **parent_mixin.kwargs) + assert get_object_or_404.return_value == return_value + diff --git a/awx/main/tests/unit/api/test_serializers.py b/awx/main/tests/unit/api/test_serializers.py new file mode 100644 index 0000000000..3cac6a34d8 --- /dev/null +++ b/awx/main/tests/unit/api/test_serializers.py @@ -0,0 +1,156 @@ +# Python +import pytest +import mock + +# AWX +from awx.api.serializers import JobTemplateSerializer, JobSerializer, JobOptionsSerializer +from awx.main.models import Label, Job + +@pytest.fixture +def job_template(mocker): + return mocker.MagicMock(pk=5) + +@pytest.fixture +def job(mocker, job_template): + return mocker.MagicMock(pk=5, job_template=job_template) + +@pytest.fixture +def labels(mocker): + return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] + +@pytest.fixture +def jobs(mocker): + return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + +class GetRelatedMixin: + def _assert(self, model_obj, related, resource_name, related_resource_name): + assert related_resource_name in related + assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name) + + def _mock_and_run(self, serializer_class, model_obj): + serializer = serializer_class() + related = serializer.get_related(model_obj) + return related + + def _test_get_related(self, serializer_class, model_obj, resource_name, related_resource_name): + related = self._mock_and_run(serializer_class, model_obj) + 
self._assert(model_obj, related, resource_name, related_resource_name) + return related + +class GetSummaryFieldsMixin: + def _assert(self, summary, summary_field_name): + assert summary_field_name in summary + + def _mock_and_run(self, serializer_class, model_obj): + serializer = serializer_class() + return serializer.get_summary_fields(model_obj) + + def _test_get_summary_fields(self, serializer_class, model_obj, summary_field_name): + summary = self._mock_and_run(serializer_class, model_obj) + self._assert(summary, summary_field_name) + return summary + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobTemplateSerializerGetRelated(GetRelatedMixin): + @pytest.mark.parametrize("related_resource_name", [ + 'jobs', + 'schedules', + 'activity_stream', + 'launch', + 'notifiers_any', + 'notifiers_success', + 'notifiers_error', + 'survey_spec', + 'labels', + 'callback', + ]) + def test_get_related(self, job_template, related_resource_name): + self._test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name) + + def test_callback_absent(self, job_template): + job_template.host_config_key = None + related = self._mock_and_run(JobTemplateSerializer, job_template) + assert 'callback' not in related + +class TestJobTemplateSerializerGetSummaryFields(GetSummaryFieldsMixin): + def test__recent_jobs(self, mocker, job_template, jobs): + + job_template.jobs.filter = mocker.MagicMock(**{'order_by.return_value': jobs}) + job_template.jobs.filter.return_value = job_template.jobs.filter + + serializer = JobTemplateSerializer() + recent_jobs = serializer._recent_jobs(job_template) + + job_template.jobs.filter.assert_called_with(active=True) + job_template.jobs.filter.order_by.assert_called_with('-created') + assert len(recent_jobs) == 10 + for x in jobs[:10]: + assert recent_jobs == [{'id': x.id, 'status': 
x.status, 'finished': x.finished} for x in jobs[:10]] + + def test_survey_spec_exists(self, mocker, job_template): + job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'} + self._test_get_summary_fields(JobTemplateSerializer, job_template, 'survey') + + def test_survey_spec_absent(self, mocker, job_template): + job_template.survey_spec = None + summary = self._mock_and_run(JobTemplateSerializer, job_template) + assert 'survey' not in summary + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_false(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_false(self, mocker, job_template): + pass + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobSerializerGetRelated(GetRelatedMixin): + @pytest.mark.parametrize("related_resource_name", [ + 'job_events', + 'job_plays', + 'job_tasks', + 'relaunch', + 'labels', + ]) + def test_get_related(self, mocker, job, related_resource_name): + self._test_get_related(JobSerializer, job, 'jobs', related_resource_name) + + def test_job_template_present(self, job): + job.job_template.active = True + serializer = JobSerializer() + related = serializer.get_related(job) + assert 'job_template' in related + + def test_job_template_absent(self, job): + job.job_template.active = False + serializer = JobSerializer() + related = serializer.get_related(job) + assert 'job_template' not in related + +@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) +class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin): + def test__summary_field_labels_10_max(self, 
mocker, job_template, labels): + job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels}) + job_template.labels.all.return_value = job_template.labels.all + + serializer = JobOptionsSerializer() + summary_labels = serializer._summary_field_labels(job_template) + + job_template.labels.all.order_by.assert_called_with('-name') + assert len(summary_labels) == 10 + assert summary_labels == [{'id': x.id, 'name': x.name} for x in labels[:10]] + + def test_labels_exists(self, mocker, job_template): + self._test_get_summary_fields(JobOptionsSerializer, job_template, 'labels') + diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py new file mode 100644 index 0000000000..6a7668c472 --- /dev/null +++ b/awx/main/tests/unit/api/test_views.py @@ -0,0 +1,52 @@ +# Python +import pytest + +# AWX +from awx.api.views import ApiV1RootView + +@pytest.fixture +def mock_response_new(mocker): + m = mocker.patch('awx.api.views.Response.__new__') + m.return_value = m + return m + +class TestApiV1RootView: + def test_get_endpoints(self, mocker, mock_response_new): + endpoints = [ + 'authtoken', + 'ping', + 'config', + 'settings', + 'me', + 'dashboard', + 'organizations', + 'users', + 'projects', + 'teams', + 'credentials', + 'inventory', + 'inventory_scripts', + 'inventory_sources', + 'groups', + 'hosts', + 'job_templates', + 'jobs', + 'ad_hoc_commands', + 'system_job_templates', + 'system_jobs', + 'schedules', + 'notifiers', + 'notifications', + 'labels', + 'unified_job_templates', + 'unified_jobs', + 'activity_stream', + ] + view = ApiV1RootView() + ret = view.get(mocker.MagicMock()) + + assert ret == mock_response_new + data_arg = mock_response_new.mock_calls[0][1][1] + for endpoint in endpoints: + assert endpoint in data_arg + From 4882bef180d73215c1bd96865a7923b9b5467242 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Fri, 1 Apr 2016 15:13:46 -0400 Subject: [PATCH 4/9] fix job templates endpoint rbacism update --- 
awx/api/serializers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 04e6f59134..b170199bdb 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -1631,7 +1631,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): return res def _recent_jobs(self, obj): - return [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.filter(active=True).order_by('-created')[:10]] + return [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.all().order_by('-created')[:10]] def get_summary_fields(self, obj): d = super(JobTemplateSerializer, self).get_summary_fields(obj) From 28acc9516dc0daf04e6d992569f916c31a70effc Mon Sep 17 00:00:00 2001 From: Akita Noek Date: Fri, 1 Apr 2016 15:18:48 -0400 Subject: [PATCH 5/9] Hopefully fix ContentType problem hit during 2.4 -> 3.0 upgrade migration #1380 --- awx/main/migrations/_rbac.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/migrations/_rbac.py b/awx/main/migrations/_rbac.py index d6f202e75a..318c6d4667 100644 --- a/awx/main/migrations/_rbac.py +++ b/awx/main/migrations/_rbac.py @@ -1,6 +1,5 @@ import logging -from django.contrib.contenttypes.models import ContentType from django.utils.encoding import smart_text from django.db.models import Q @@ -31,6 +30,7 @@ def migrate_users(apps, schema_editor): User = apps.get_model('auth', "User") Role = apps.get_model('main', "Role") RolePermission = apps.get_model('main', "RolePermission") + ContentType = apps.get_model('contenttypes', "ContentType") for user in User.objects.iterator(): try: From cdcf81cffa68133aec0095aff1893bc5b6b46c93 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 4 Apr 2016 09:56:36 -0400 Subject: [PATCH 6/9] rbac active removal test cases fixes --- awx/main/tests/unit/api/test_serializers.py | 23 ++++++++++----------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git 
a/awx/main/tests/unit/api/test_serializers.py b/awx/main/tests/unit/api/test_serializers.py index 3cac6a34d8..50e6322d1d 100644 --- a/awx/main/tests/unit/api/test_serializers.py +++ b/awx/main/tests/unit/api/test_serializers.py @@ -76,14 +76,14 @@ class TestJobTemplateSerializerGetRelated(GetRelatedMixin): class TestJobTemplateSerializerGetSummaryFields(GetSummaryFieldsMixin): def test__recent_jobs(self, mocker, job_template, jobs): - job_template.jobs.filter = mocker.MagicMock(**{'order_by.return_value': jobs}) - job_template.jobs.filter.return_value = job_template.jobs.filter + job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) + job_template.jobs.all.return_value = job_template.jobs.all serializer = JobTemplateSerializer() recent_jobs = serializer._recent_jobs(job_template) - job_template.jobs.filter.assert_called_with(active=True) - job_template.jobs.filter.order_by.assert_called_with('-created') + job_template.jobs.all.assert_called_once_with() + job_template.jobs.all.order_by.assert_called_once_with('-created') assert len(recent_jobs) == 10 for x in jobs[:10]: assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] @@ -126,18 +126,17 @@ class TestJobSerializerGetRelated(GetRelatedMixin): def test_get_related(self, mocker, job, related_resource_name): self._test_get_related(JobSerializer, job, 'jobs', related_resource_name) - def test_job_template_present(self, job): - job.job_template.active = True - serializer = JobSerializer() - related = serializer.get_related(job) - assert 'job_template' in related - - def test_job_template_absent(self, job): - job.job_template.active = False + def test_job_template_absent(self, mocker, job): + job.job_template = None serializer = JobSerializer() related = serializer.get_related(job) assert 'job_template' not in related + def test_job_template_present(self, job): + related = self._mock_and_run(JobSerializer, job) + assert 'job_template' in related + 
assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk) + @mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin): def test__summary_field_labels_10_max(self, mocker, job_template, labels): From b6706662a4d29b9ac6e7559c36f3cf2303fba044 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 4 Apr 2016 09:59:06 -0400 Subject: [PATCH 7/9] Disable database settings until after 3.0 --- awx/api/urls.py | 3 ++- awx/api/views.py | 3 ++- awx/main/tests/old/settings.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/awx/api/urls.py b/awx/api/urls.py index f3b24c147a..e1fa5165d1 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -285,7 +285,8 @@ v1_urls = patterns('awx.api.views', url(r'^me/$', 'user_me_list'), url(r'^dashboard/$', 'dashboard_view'), url(r'^dashboard/graphs/jobs/$','dashboard_jobs_graph_view'), - url(r'^settings/', include(settings_urls)), + # TODO: Uncomment after 3.0 when we bring database settings endpoints back + # url(r'^settings/', include(settings_urls)), url(r'^schedules/', include(schedule_urls)), url(r'^organizations/', include(organization_urls)), url(r'^users/', include(user_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 0c406dc610..5daf08ac5c 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -112,7 +112,8 @@ class ApiV1RootView(APIView): data['authtoken'] = reverse('api:auth_token_view') data['ping'] = reverse('api:api_v1_ping_view') data['config'] = reverse('api:api_v1_config_view') - data['settings'] = reverse('api:settings_list') + # TODO: Uncomment after 3.0 when we bring database settings endpoints back + # data['settings'] = reverse('api:settings_list') data['me'] = reverse('api:user_me_list') data['dashboard'] = reverse('api:dashboard_view') data['organizations'] = reverse('api:organization_list') diff --git a/awx/main/tests/old/settings.py 
b/awx/main/tests/old/settings.py index f0d7cf63ac..d2ab4440c5 100644 --- a/awx/main/tests/old/settings.py +++ b/awx/main/tests/old/settings.py @@ -46,7 +46,7 @@ TEST_TOWER_SETTINGS_MANIFEST = { } @override_settings(TOWER_SETTINGS_MANIFEST=TEST_TOWER_SETTINGS_MANIFEST) -class SettingsTest(BaseTest): +class SettingsPlaceholder(BaseTest): def setUp(self): - super(SettingsTest, self).setUp() + super(SettingsPlaceholder, self).setUp() From b01fe663affb9023f4e27bdd8e147e6a8ed59c96 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 4 Apr 2016 11:57:22 -0400 Subject: [PATCH 8/9] Fix up tests for vacated settings --- awx/main/tests/old/settings.py | 1 + awx/main/tests/unit/api/test_views.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/main/tests/old/settings.py b/awx/main/tests/old/settings.py index d2ab4440c5..93ce2963a1 100644 --- a/awx/main/tests/old/settings.py +++ b/awx/main/tests/old/settings.py @@ -46,6 +46,7 @@ TEST_TOWER_SETTINGS_MANIFEST = { } @override_settings(TOWER_SETTINGS_MANIFEST=TEST_TOWER_SETTINGS_MANIFEST) +@pytest.mark.skip(reason="Settings deferred to 3.1") class SettingsPlaceholder(BaseTest): def setUp(self): diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py index 6a7668c472..a5d63906c5 100644 --- a/awx/main/tests/unit/api/test_views.py +++ b/awx/main/tests/unit/api/test_views.py @@ -16,7 +16,7 @@ class TestApiV1RootView: 'authtoken', 'ping', 'config', - 'settings', + #'settings', 'me', 'dashboard', 'organizations', From 469bff1d0beaef478a62a5ff3b49e75545603fb4 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 4 Apr 2016 12:04:31 -0400 Subject: [PATCH 9/9] Fix required fields in notifier serializer Handle the situation where the password field was left as-is when posting the existing encrypted field --- awx/api/serializers.py | 8 ++++++-- awx/main/models/notifications.py | 3 ++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index b170199bdb..89fda89a55 
100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2139,6 +2139,10 @@ class NotifierSerializer(BaseSerializer): incorrect_type_fields = [] if 'notification_configuration' not in attrs: return attrs + if self.context['view'].kwargs: + object_actual = self.context['view'].get_object() + else: + object_actual = None for field in notification_class.init_parameters: if field not in attrs['notification_configuration']: missing_fields.append(field) @@ -2149,8 +2153,8 @@ class NotifierSerializer(BaseSerializer): if not type(field_val) in expected_types: incorrect_type_fields.append((field, field_type)) continue - if field_type == "password" and field_val.startswith('$encrypted$'): - missing_fields.append(field) + if field_type == "password" and field_val == "$encrypted$" and object_actual is not None: + attrs['notification_configuration'][field] = object_actual.notification_configuration[field] error_list = [] if missing_fields: error_list.append("Missing required fields for Notification Configuration: {}".format(missing_fields)) diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 29a51cf9ac..40dcba67ac 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -68,6 +68,8 @@ class Notifier(CommonModel): update_fields = kwargs.get('update_fields', []) for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters): + if self.notification_configuration[field].startswith("$encrypted$"): + continue if new_instance: value = self.notification_configuration[field] setattr(self, '_saved_{}_{}'.format("config", field), value) @@ -84,7 +86,6 @@ class Notifier(CommonModel): self.notification_class.init_parameters): saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '') self.notification_configuration[field] = saved_value - #setattr(self.notification_configuration, field, saved_value) if 
'notification_configuration' not in update_fields: update_fields.append('notification_configuration') self.save(update_fields=update_fields)