Mirror of https://github.com/ansible/awx.git, synced 2026-01-11 10:00:01 -03:30

commit 3ab8a93461

Merge branch 'release_3.0.0' into stable

* release_3.0.0: (2364 commits)
  remove aws ask at runtime prompt from cred form config, resolves #3055 (#3058)
  fix missing URI encoding in event summary search, kickback on #2980 (#3050)
  Switch base class for StateConflict
  Fixed password show/hide on enter for survey maker password type previews
  Fixed password show/hide on enter for survey taker survey questions where type is password
  Prevent populate_user from being registered multiple times.
  Fixing iterator used when jobs list refreshes
  Explicit super user check for JT can_delete
  Fix for populating teams for LDAP user.
  Update hubspot template for marketing
  Fix up flake8
  Rolled back the onExit solution previously implemented to handle the backspace navigation on the job launch modal. New solution listens for state changes within the directive and cleans itself up.
  Switch disallowed object delete to 409
  Password enter show/hide fix
  add test for CustomInventoryScript serializer
  Fixed bug where hitting enter in a password field in the job launch/survey maker modal would toggle the show/hide.
  Setting the local var CredentialList to the deep clone seems to be problematic. Moving this out so that the original object itself is overwritten, which is how it's done in other places. (#3017)
  Jobs list page size (#3019)
  resolves kickback on #2980 (#3008)
  Use the correct yum attribute name
  ...
@@ -25,3 +25,5 @@ exclude_lines =

ignore_errors = True

[xml]
output = ./reports/coverage.xml
25 .gitignore vendored
@@ -5,6 +5,7 @@
# Tower
awx/settings/local_settings.py*
awx/*.sqlite3
awx/*.sqlite3_*
awx/job_status
awx/projects
awx/job_output
@@ -22,6 +23,10 @@ celerybeat-schedule
awx/ui/static
awx/ui/build_test

# Tower setup playbook testing
setup/test/roles/postgresql
**/provision_docker

# Python & setuptools
__pycache__
/build
@@ -30,7 +35,7 @@ __pycache__
/tar-build
/setup-bundle-build
/dist
*.egg-info
/*.egg-info
*.py[c,o]

# JavaScript
@@ -39,6 +44,8 @@ __pycache__
/bower.json
/package.json
/testem.yml
/coverage
/.istanbul.yml
node_modules/**
/tmp
npm-debug.log
@@ -47,6 +54,7 @@ npm-debug.log
/DEBUG

# Testing
.cache
.coverage
.tox
coverage.xml
@@ -54,12 +62,13 @@ htmlcov
pep8.txt
scratch
testem.log
awx/awx_test.sqlite3-journal

# Mac OS X
*.DS_Store

# Editors
*.swp
*.sw[poj]
*~

# Vagrant
@@ -80,13 +89,23 @@ setup/setup.log
setup/inventory
tower-backup-*

# Ansible
**/*.retry

# Other
.tower_cycle
env/*
nohup.out
reports
*.bak
*.bak[0-9]
*.dot
*.log
*.log.[0-9]
*.results
local/

# AWX python libs populated by requirements.txt
awx/lib/.deps_built
awx/lib/site-packages

venv/*
31 ISSUE_TEMPLATE.md Normal file
@@ -0,0 +1,31 @@
### Summary

<!-- Briefly describe the problem. -->

### Environment

<!--
* Tower version: X.Y.Z
* Ansible version: X.Y.Z
* Operating System:
* Web Browser:
-->

### Steps To Reproduce:

<!-- For bugs, please show exactly how to reproduce the problem. For new
features, show how the feature would be used. -->

### Expected Results:

<!-- For bug reports, what did you expect to happen when running the steps
above? -->

### Actual Results:

<!-- For bug reports, what actually happened? -->

### Additional Information:

<!-- Include any links to sosreport, database dumps, screenshots or other
information. -->
@@ -1,5 +1,5 @@
recursive-include awx *.py
recursive-include awx/static *.ico
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html
recursive-include awx/ui/templates *.html
@@ -17,7 +17,7 @@ recursive-exclude awx/settings local_settings.py*
include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1
include tools/scripts/ansible-tower-service
include tools/munin_monitors/*
include tools/scripts/tower-python
include tools/sosreport/*
include COPYING
include Makefile
389 Makefile
@@ -6,16 +6,25 @@ PACKER ?= packer
PACKER_BUILD_OPTS ?= -var 'official=$(OFFICIAL)' -var 'aw_repo_url=$(AW_REPO_URL)'
GRUNT ?= $(shell [ -t 0 ] && echo "grunt" || echo "grunt --no-color")
TESTEM ?= ./node_modules/.bin/testem
TESTEM_DEBUG_BROWSER ?= Chrome
BROCCOLI_BIN ?= ./node_modules/.bin/broccoli
MOCHA_BIN ?= ./node_modules/.bin/mocha
MOCHA_BIN ?= ./node_modules/.bin/_mocha
ISTANBUL_BIN ?= ./node_modules/.bin/istanbul
BROWSER_SYNC_BIN ?= ./node_modules/.bin/browser-sync
NODE ?= node
NPM_BIN ?= npm
DEPS_SCRIPT ?= packaging/bundle/deps.py
GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)

VENV_BASE ?= /tower_devel/venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /celerybeat-schedule

CLIENT_TEST_DIR ?= build_test

# Python packages to install only from source (not from binary wheels)
# Comma separated list
SRC_ONLY_PKGS ?= cffi

# Determine appropriate shasum command
UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Linux)
@@ -106,6 +115,7 @@ MOCK_CFG ?=
RPM_SPECDIR= packaging/rpm
RPM_SPEC = $(RPM_SPECDIR)/$(NAME).spec
RPM_DIST ?= $(shell rpm --eval '%{?dist}' 2>/dev/null)

# Provide a fallback value for RPM_DIST
ifeq ($(RPM_DIST),)
RPM_DIST = .el6
@@ -115,7 +125,17 @@ RPM_ARCH ?= $(shell rpm --eval '%{_arch}' 2>/dev/null)
ifeq ($(RPM_ARCH),)
RPM_ARCH = $(shell uname -m)
endif
RPM_NVR = $(NAME)-$(VERSION)-$(RELEASE)$(RPM_DIST)

# Software collections settings if on EL6
ifeq ($(RPM_DIST),.el6)
SCL_PREFIX = python27-
SCL_DEFINES = --define 'scl python27'
else
SCL_PREFIX =
SCL_DEFINES =
endif

RPM_NVR = $(SCL_PREFIX)$(NAME)-$(VERSION)-$(RELEASE)$(RPM_DIST)

# TAR Bundle build parameters
DIST = $(shell echo $(RPM_DIST) | sed -e 's|^\.\(el\)\([0-9]\).*|\1|')
@@ -152,12 +172,15 @@ endif

.DEFAULT_GOAL := build

.PHONY: clean rebase push requirements requirements_dev requirements_jenkins \
	real-requirements real-requirements_dev real-requirements_jenkins \
	develop refresh adduser syncdb migrate dbchange dbshell runserver celeryd \
	receiver test test_coverage coverage_html ui_analysis_report test_jenkins dev_build \
.PHONY: clean clean-tmp rebase push requirements requirements_dev \
	requirements_jenkins \
	develop refresh adduser migrate dbchange dbshell runserver celeryd \
	receiver test test_unit test_coverage coverage_html test_jenkins dev_build \
	release_build release_clean sdist rpmtar mock-rpm mock-srpm rpm-sign \
	devjs minjs testjs testjs_ci node-tests browser-tests jshint ngdocs sync_ui \
	build-ui sync-ui test-ui build-ui-for-coverage test-ui-for-coverage \
	build-ui-for-browser-tests test-ui-debug jshint ngdocs \
	websocket-proxy browser-sync browser-sync-reload brocolli-watcher \
	devjs minjs testjs_ci \
	deb deb-src debian debsign pbuilder reprepro setup_tarball \
	virtualbox-ovf virtualbox-centos-7 virtualbox-centos-6 \
	clean-bundle setup_bundle_tarball
@@ -179,13 +202,6 @@ clean-grunt:
	rm -f package.json Gruntfile.js Brocfile.js bower.json
	rm -rf node_modules

# Remove UI build files
clean-ui:
	rm -rf DEBUG
	rm -rf awx/ui/build_test
	rm -rf awx/ui/static/
	rm -rf awx/ui/dist

# Remove packer artifacts
clean-packer:
	rm -rf packer_cache
@@ -200,8 +216,21 @@ clean-packer:
clean-bundle:
	rm -rf setup-bundle-build

# remove ui build artifacts
clean-ui:
	rm -rf DEBUG

clean-static:
	rm -rf awx/ui/static/

clean-build-test:
	rm -rf awx/ui/build_test/

clean-tmp:
	rm -rf tmp/

# Remove temporary build files, compiled Python files.
clean: clean-rpm clean-deb clean-grunt clean-ui clean-tar clean-packer clean-bundle
clean: clean-rpm clean-deb clean-grunt clean-ui clean-static clean-build-test clean-tar clean-packer clean-bundle
	rm -rf awx/lib/site-packages
	rm -rf awx/lib/.deps_built
	rm -rf dist/*
@@ -225,30 +254,71 @@ rebase:
push:
	git push origin master

# Install runtime, development and jenkins requirements
requirements requirements_dev requirements_jenkins: %: real-%
virtualenv: virtualenv_ansible virtualenv_tower

# Install third-party requirements needed for development environment.
# NOTE:
# * --target is only supported on newer versions of pip
# * https://github.com/pypa/pip/issues/3056 - the workaround is to override the `install-platlib`
# * --user (in conjunction with PYTHONUSERBASE="awx") may be a better option
# * --target implies --ignore-installed
real-requirements:
	@if [ "$(PYTHON_VERSION)" = "2.6" ]; then \
	    pip install -r requirements/requirements_python26.txt --target awx/lib/site-packages/ --install-option="--install-platlib=\$$base/lib/python"; \
	else \
	    pip install -r requirements/requirements.txt --target awx/lib/site-packages/ --install-option="--install-platlib=\$$base/lib/python"; \
virtualenv_ansible:
	if [ "$(VENV_BASE)" ]; then \
	    if [ ! -d "$(VENV_BASE)" ]; then \
	        mkdir $(VENV_BASE); \
	    fi; \
	    if [ ! -d "$(VENV_BASE)/ansible" ]; then \
	        virtualenv --system-site-packages --setuptools $(VENV_BASE)/ansible && \
	        $(VENV_BASE)/ansible/bin/pip install -I setuptools==23.0.0 && \
	        $(VENV_BASE)/ansible/bin/pip install -I pip==8.1.1; \
	    fi; \
	fi

real-requirements_dev:
	pip install -r requirements/requirements_dev.txt --target awx/lib/site-packages/ --install-option="--install-platlib=\$$base/lib/python"
virtualenv_tower:
	if [ "$(VENV_BASE)" ]; then \
	    if [ ! -d "$(VENV_BASE)" ]; then \
	        mkdir $(VENV_BASE); \
	    fi; \
	    if [ ! -d "$(VENV_BASE)/tower" ]; then \
	        virtualenv --system-site-packages --setuptools $(VENV_BASE)/tower && \
	        $(VENV_BASE)/tower/bin/pip install -I setuptools==23.0.0 && \
	        $(VENV_BASE)/tower/bin/pip install -I pip==8.1.1; \
	    fi; \
	fi

requirements_ansible: virtualenv_ansible
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/ansible/bin/activate; \
	    $(VENV_BASE)/ansible/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ;\
	else \
	    pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ; \
	fi

# Install third-party requirements needed for Tower's environment.
requirements_tower: virtualenv_tower
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	    $(VENV_BASE)/tower/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ;\
	else \
	    pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ; \
	fi

requirements_tower_dev:
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	    $(VENV_BASE)/tower/bin/pip install -r requirements/requirements_dev.txt; \
	fi

# Install third-party requirements needed for running unittests in jenkins
real-requirements_jenkins:
	pip install -r requirements/requirements_jenkins.txt
requirements_jenkins:
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	    $(VENV_BASE)/tower/bin/pip install -Ir requirements/requirements_jenkins.txt; \
	else \
	    pip install -Ir requirements/requirements_jenkins.txt; \
	fi && \
	$(NPM_BIN) install csslint jshint

requirements: requirements_ansible requirements_tower

requirements_dev: requirements requirements_tower_dev

requirements_test: requirements requirements_jenkins

# "Install" ansible-tower package in development mode.
develop:
	@if [ "$(VIRTUAL_ENV)" ]; then \
@@ -265,30 +335,28 @@ version_file:

# Do any one-time init tasks.
init:
	@if [ "$(VIRTUAL_ENV)" ]; then \
	    awx-manage register_instance --primary --hostname=127.0.0.1; \
	else \
	    sudo awx-manage register_instance --primary --hostname=127.0.0.1; \
	fi
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	tower-manage register_instance --primary --hostname=127.0.0.1; \

# Refresh development environment after pulling new code.
refresh: clean requirements_dev version_file develop migrate

# Create Django superuser.
adduser:
	$(PYTHON) manage.py createsuperuser

# Create initial database tables (excluding migrations).
syncdb:
	$(PYTHON) manage.py syncdb --noinput
	tower-manage createsuperuser

# Create database tables and apply any new migrations.
migrate: syncdb
	$(PYTHON) manage.py migrate --noinput
migrate:
	if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	tower-manage migrate --noinput --fake-initial

# Run after making changes to the models to create a new migration.
dbchange:
	$(PYTHON) manage.py schemamigration main v14_changes --auto
	tower-manage makemigrations

# access database shell, asks for password
dbshell:
@@ -316,27 +384,48 @@ servercc: server_noattach
# Alternate approach to tmux to run all development tasks specified in
# Procfile. https://youtu.be/OPMgaibszjk
honcho:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	honcho start

# Run the built-in development webserver (by default on http://localhost:8013).
runserver:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py runserver

# Run to start the background celery worker for development.
celeryd:
	$(PYTHON) manage.py celeryd -l DEBUG -B --autoscale=20,2 -Ofair
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py celeryd -l DEBUG -B --autoscale=20,2 -Ofair --schedule=$(CELERY_SCHEDULE_FILE)

# Run to start the zeromq callback receiver
receiver:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py run_callback_receiver

taskmanager:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py run_task_system

socketservice:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py run_socketio_service

factcacher:
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py run_fact_cache_receiver

reports:
@@ -356,13 +445,17 @@ pylint: reports

check: flake8 pep8 # pyflakes pylint

TEST_DIRS=awx/main/tests
# Run all API unit tests.
test:
	$(PYTHON) manage.py test -v2 awx.main.tests
	py.test $(TEST_DIRS)

test_unit:
	py.test awx/main/tests/unit

# Run all API unit tests with coverage enabled.
test_coverage:
	coverage run manage.py test -v2 awx.main.tests
	py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)

# Output test coverage as HTML (into htmlcov directory).
coverage_html:
@@ -373,12 +466,13 @@ test_tox:
	tox -v

# Run unit tests to produce output for Jenkins.
test_jenkins:
	$(PYTHON) manage.py jenkins -v2 --enable-coverage --project-apps-tests
# Alias existing make target so old versions run against Jenkins the same way
test_jenkins : test_coverage
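For reference, a minimal, purely hypothetical unit test of the kind the new py.test-based `test` and `test_unit` targets above would collect; the file path and test name are illustrative only, not part of this commit:

```python
# awx/main/tests/unit/test_example.py  (hypothetical path under TEST_DIRS)

def test_example():
    # py.test discovers test_*.py modules and test_* functions automatically,
    # so no unittest boilerplate is needed.
    assert 1 + 1 == 2
```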
# UI TASKS
# --------------------------------------

# begin targets that pull ui files from packaging to the root of the app
Gruntfile.js: packaging/node/Gruntfile.js
	cp $< $@

@@ -394,69 +488,155 @@ package.json: packaging/node/package.template
testem.yml: packaging/node/testem.yml
	cp $< $@

# Update local npm install
.istanbul.yml: packaging/node/.istanbul.yml
	cp $< $@
# end targets that pull ui files from packaging to the root of the app

# update package.json and install npm dependencies
node_modules: package.json
	$(NPM_BIN) install
	touch $@

awx/ui/%: node_modules clean-ui Brocfile.js bower.json
	$(BROCCOLI_BIN) build $@ -- $(UI_FLAGS)
# helper tasks to run broccoli build process at awx/ui/<destination_dir>,
# to build the ui, use the build-ui target instead:
# UI_FLAGS=<flags as seen in Brocfile.js and
#   packaging/node/tower-app.js>: additional parameters to pass broccoli
#   for building
awx/ui/static: node_modules clean-ui clean-static Brocfile.js bower.json
	$(BROCCOLI_BIN) build awx/ui/static -- $(UI_FLAGS)

# Concatenated, non-minified build; contains debug code and sourcemaps; does not include any tests
devjs: awx/ui/static
awx/ui/build_test: node_modules clean-ui clean-build-test Brocfile.js bower.json
	$(BROCCOLI_BIN) build awx/ui/build_test -- $(UI_FLAGS)

# Concatenated, minified, compressed (production) build with no sourcemaps or tests
minjs: UI_FLAGS=--silent --compress --no-docs --no-debug --no-sourcemaps $(EXTRA_UI_FLAGS)
minjs: awx/ui/static
# build the ui to awx/ui/static:
#   defaults to standard dev build (concatenated, non-minified, sourcemaps, no
#   tests)
# PROD=true: standard prod build (concatenated, minified, no sourcemaps,
#   compressed, no tests)
# EXTRA_UI_FLAGS=<flags as seen in Brocfile.js and
#   packaging/node/tower-app.js>: additional parameters to pass broccoli
#   for building
PROD ?= false

# Performs build to awx/ui/build_test and runs node tests via mocha
testjs: UI_FLAGS=--node-tests --no-concat --no-styles $(EXTRA_UI_FLAGS)
testjs: awx/ui/build_test node-tests
# TODO: Remove after 2.4 (alias for devjs/minjs)
devjs: build-ui
minjs: build-ui
ifeq ($(MAKECMDGOALS),minjs)
PROD = true
endif

# Performs a non-minified, non-compressed build to awx/ui/static and runs browser tests with testem ci
testjs_ci: UI_FLAGS=--no-styles --no-compress --browser-tests --no-node-tests --no-sourcemaps $(EXTRA_UI_FLAGS)
testjs_ci: awx/ui/static testem.yml browser-tests-ci
ifeq ($(PROD),true)
UI_FLAGS=--silent --compress --no-docs --no-debug --no-sourcemaps \
	$(EXTRA_UI_FLAGS)
else
UI_FLAGS=$(EXTRA_UI_FLAGS)
endif

# Performs a non-minified, non-compressed build to awx/ui/static and runs browser tests with testem ci in Chrome
testjs_debug: UI_FLAGS=--no-styles --no-compress --browser-tests --no-node-tests --no-sourcemaps $(EXTRA_UI_FLAGS)
testjs_debug: awx/ui/static testem.yml browser-tests-debug
build-ui: awx/ui/static

# Runs node tests via mocha without building
node-tests:
	NODE_PATH=awx/ui/build_test $(MOCHA_BIN) --full-trace $(shell find awx/ui/build_test -name '*-test.js') $(MOCHA_FLAGS)
# launch watcher to continuously build the ui to awx/ui/static and run tests
# after changes are made:
# WATCHER_FLAGS: options to be utilized by broccoli timepiece
# UI_FLAGS=<flags as seen in Brocfile.js and
#   packaging/node/tower-app.js>: additional parameters to pass broccoli
#   for building
# DOCKER_MACHINE_NAME=<name of docker-machine tower is running on>: when
#   passed, not only will brocolli rebuild, but browser-sync will proxy
#   tower and refresh the ui when a change is made.
DOCKER_MACHINE_NAME ?= none
ifeq ($(DOCKER_MACHINE_NAME),none)
sync-ui: node_modules clean-tmp brocolli-watcher
else
sync-ui: node_modules clean-tmp
	tmux new-session -d -s ui_sync 'exec make brocolli-watcher'
	tmux rename-window 'UI Sync'
	tmux select-window -t ui_sync:0
	tmux split-window -v 'exec make browser-sync'
	tmux split-window -h 'exec make websocket-proxy'
	tmux select-layout main-vertical
	tmux attach-session -t ui_sync
endif

# Runs browser tests on PhantomJS. Outputs the results in a consumable manner for Jenkins.
browser-tests-ci:
	PATH=./node_modules/.bin:$(PATH) $(TESTEM) ci --file testem.yml -p 7359 -R xunit
websocket-proxy:
	docker-machine ssh $(DOCKER_MACHINE_NAME) -L 8080:localhost:8080

# Runs browser tests using settings from `testem.yml`; you can pass in the browser you'd
# like to run the tests on (defaults to Chrome; other options are Safari, Firefox, and PhantomJS).
# If you want to run the tests in Node (which is the quickest, but also more difficult to debug),
# make sure to run the testjs/node-tests targets
browser-tests-debug:
browser-sync:
	$(BROWSER_SYNC_BIN) start --proxy $(shell docker-machine ip $(DOCKER_MACHINE_NAME)):8013 --ws

browser-sync-reload:
	$(BROWSER_SYNC_BIN) reload

brocolli-watcher: Brocfile.js testem.yml
	$(NODE) tools/ui/timepiece.js awx/ui/static $(WATCHER_FLAGS) -- $(UI_FLAGS)

# run ui unit-tests:
#   defaults to a useful dev testing run. Builds the ui to awx/ui/build_test
#   and runs mocha (node.js) tests with istanbul coverage (and an html
#   coverage report)
# UI_TESTS_TO_RUN=<file>-test.js: Set this to only run a specific test file
# CI=true: Builds the ui to awx/ui/build_test
#   and runs mocha (node.js) tests with istanbul coverage (and a cobertura
#   coverage report). Also builds the ui to awx/ui/static and runs the
#   testem (phantomjs) tests. Outputs these to XUNIT format to be consumed
#   and displayed in jenkins
# DEBUG=true: Builds the ui to awx/ui/static and runs testem tests in Chrome
#   so you can breakpoint the tests and underlying code to figure out why
#   tests are failing.
# TESTEM_DEBUG_BROWSER: the browser to run tests in, defaults to Chrome

# TODO: deprecated past 2.4
testjs_ci: test-ui # w var UI_TEST_MODE=CI

UI_TEST_MODE ?= DEV
ifeq ($(UI_TEST_MODE),CI)
# ci testing run
# this used to be testjs_ci, sort-of
REPORTER = xunit
test-ui: .istanbul.yml build-ui-for-coverage test-ui-for-coverage
else
ifeq ($(UI_TEST_MODE),DEV_DEBUG)
# debug (breakpoint) dev testing run
test-ui: build-ui-for-browser-tests test-ui-debug
else
# default dev testing run
test-ui: .istanbul.yml build-ui-for-coverage test-ui-for-coverage
endif
endif

# helper tasks to test ui, don't call directly
build-ui-for-coverage: UI_FLAGS=--node-tests --no-concat --no-styles
build-ui-for-coverage: awx/ui/build_test

REPORTER ?= standard
UI_TESTS_TO_RUN ?= all
ifeq ($(REPORTER), xunit)
test-ui-for-coverage:
	XUNIT_FILE=reports/test-results-ui.xml NODE_PATH=awx/ui/build_test $(ISTANBUL_BIN) cover --include-all-sources $(MOCHA_BIN) -- --full-trace --reporter xunit-file $(shell find awx/ui/build_test -name '*-test.js'); cp coverage/ui-coverage-report.xml reports/coverage-report-ui.xml
else
ifeq ($(UI_TESTS_TO_RUN), all)
test-ui-for-coverage:
	NODE_PATH=awx/ui/build_test $(ISTANBUL_BIN) cover --include-all-sources $(MOCHA_BIN) -- --full-trace $(shell find awx/ui/build_test -name '*-test.js')
else
test-ui-for-coverage:
	NODE_PATH=awx/ui/build_test $(ISTANBUL_BIN) cover $(MOCHA_BIN) -- --full-trace $(shell find awx/ui/build_test -name '$(UI_TESTS_TO_RUN)')
endif
endif

build-ui-for-browser-tests: UI_FLAGS=--no-styles --no-compress --browser-tests --no-node-tests
build-ui-for-browser-tests: awx/ui/static

TESTEM_DEBUG_BROWSER ?= Chrome
test-ui-debug:
	PATH=./node_modules/.bin:$(PATH) $(TESTEM) --file testem.yml -l $(TESTEM_DEBUG_BROWSER)

# Check .js files for errors and lint
# lint .js files
jshint: node_modules Gruntfile.js
	$(GRUNT) $@

# Generate UI code documentation
ngdocs: devjs Gruntfile.js
# generate ui docs
ngdocs: build-ui Gruntfile.js
	$(GRUNT) $@

# Launch watcher for build process
sync_ui: node_modules Brocfile.js testem.yml
	$(NODE) tools/ui/timepiece.js awx/ui/static $(WATCHER_FLAGS) -- $(UI_FLAGS)

# Build code complexity report for UI code
ui_analysis_report: reports/ui_code node_modules Gruntfile.js
	$(GRUNT) plato:report

# Non-concatenated, non-minified build with no tests, no debug code, no sourcemaps for plato reports
reports/ui_code: node_modules clean-ui Brocfile.js bower.json Gruntfile.js
	rm -rf reports/ui_code
	$(BROCCOLI_BIN) build reports/ui_code -- --no-concat --no-debug --no-styles --no-sourcemaps

# END UI TASKS
# --------------------------------------

@@ -474,7 +654,7 @@ tar-build/$(SETUP_TAR_FILE):
	@cp -a setup tar-build/$(SETUP_TAR_NAME)
	@rsync -az docs/licenses tar-build/$(SETUP_TAR_NAME)/
	@cd tar-build/$(SETUP_TAR_NAME) && sed -e 's#%NAME%#$(NAME)#;s#%VERSION%#$(VERSION)#;s#%RELEASE%#$(RELEASE)#;' group_vars/all.in > group_vars/all
	@cd tar-build && tar -czf $(SETUP_TAR_FILE) --exclude "*/all.in" $(SETUP_TAR_NAME)/
	@cd tar-build && tar -czf $(SETUP_TAR_FILE) --exclude "*/all.in" --exclude "**/test/*" $(SETUP_TAR_NAME)/
	@ln -sf $(SETUP_TAR_FILE) tar-build/$(SETUP_TAR_LINK)

tar-build/$(SETUP_TAR_CHECKSUM):
@@ -541,6 +721,7 @@ rpm-build/$(SDIST_TAR_FILE): rpm-build dist/$(SDIST_TAR_FILE)
	cp packaging/rpm/$(NAME).te rpm-build/
	cp packaging/rpm/$(NAME).sysconfig rpm-build/
	cp packaging/remove_tower_source.py rpm-build/
	cp packaging/bytecompile.sh rpm-build/
	if [ "$(OFFICIAL)" != "yes" ] ; then \
	    (cd dist/ && tar zxf $(SDIST_TAR_FILE)) ; \
	    (cd dist/ && mv $(NAME)-$(VERSION)-$(BUILD) $(NAME)-$(VERSION)) ; \
@@ -554,7 +735,7 @@ rpmtar: sdist rpm-build/$(SDIST_TAR_FILE)

rpm-build/$(RPM_NVR).src.rpm: /etc/mock/$(MOCK_CFG).cfg
	$(MOCK_BIN) -r $(MOCK_CFG) --resultdir rpm-build --buildsrpm --spec rpm-build/$(NAME).spec --sources rpm-build \
	    --define "tower_version $(VERSION)" --define "tower_release $(RELEASE)"
	    --define "tower_version $(VERSION)" --define "tower_release $(RELEASE)" $(SCL_DEFINES)

mock-srpm: rpmtar rpm-build/$(RPM_NVR).src.rpm
	@echo "#############################################"
@@ -564,7 +745,7 @@ mock-srpm: rpmtar rpm-build/$(RPM_NVR).src.rpm

rpm-build/$(RPM_NVR).$(RPM_ARCH).rpm: rpm-build/$(RPM_NVR).src.rpm
	$(MOCK_BIN) -r $(MOCK_CFG) --resultdir rpm-build --rebuild rpm-build/$(RPM_NVR).src.rpm \
	    --define "tower_version $(VERSION)" --define "tower_release $(RELEASE)"
	    --define "tower_version $(VERSION)" --define "tower_release $(RELEASE)" $(SCL_DEFINES)

mock-rpm: rpmtar rpm-build/$(RPM_NVR).$(RPM_ARCH).rpm
	@echo "#############################################"
@@ -696,9 +877,11 @@ packaging/packer/ansible-tower-$(VERSION)-vmx/ansible-tower-$(VERSION).vmx: pack
# TODO - figure out how to build the front-end and python requirements with
# 'build'
build:
	export SCL_PREFIX
	$(PYTHON) setup.py build

install:
	export SCL_PREFIX HTTPD_SCL_PREFIX
	$(PYTHON) setup.py install $(SETUP_INSTALL_ARGS)

# Docker Compose Development environment
@@ -708,6 +891,16 @@ docker-compose:
docker-compose-test:
	cd tools && docker-compose run --rm --service-ports tower /bin/bash

MACHINE?=default
docker-clean:
	rm -f awx/lib/.deps_built
	eval $$(docker-machine env $(MACHINE))
	docker stop $$(docker ps -a -q)
	-docker rm $$(docker ps -f name=tools_tower -a -q)
	-docker rmi tools_tower

docker-refresh: docker-clean docker-compose

mongo-debug-ui:
	docker run -it --rm --name mongo-express --link tools_mongo_1:mongo -e ME_CONFIG_OPTIONS_EDITORTHEME=ambiance -e ME_CONFIG_BASICAUTH_USERNAME=admin -e ME_CONFIG_BASICAUTH_PASSWORD=password -p 8081:8081 knickers/mongo-express
42 README.md
@@ -1,48 +1,12 @@
[](http://jenkins.testing.ansible.com/job/Test_Tower_Unittest)
[](https://requires.io/github/ansible/ansible-tower/requirements/?branch=devel)

Ansible Tower
=============

Tower provides a web-based user interface, REST API and task engine built on top of
Ansible.

The current version under development is 2.2.1.

Development releases always use the 'master' branch.

Release History
================

* 1.2.2, July 31, 2013.
* 1.3.0, September 15, 2013.
* 1.3.1, September 17, 2013.
* 1.4.0, November 25, 2013.
* 1.4.5, February 10, 2014.
* 1.4.8, April 7, 2014.
* 1.4.9, April 17, 2014.
* 1.4.10, April 28, 2014.
* 1.4.11, May 30, 2014.
* 2.0.0, August 19, 2014
* 2.0.1, September 4, 2014
* 2.0.2, October 6, 2014
* 2.0.3, November 14, 2014
* 2.0.4, November 21, 2014
* 2.0.5, December 10, 2014
* 2.1.0, January 7, 2015
* 2.1.1, February 4, 2015
* 2.1.2, March 25, 2015
* 2.1.3, April 15, 2015
* 2.1.4, June 12, 2015
* 2.1.5, June 15, 2015
* 2.1.6, June 23, 2015
* 2.2.0, July 14, 2015
* 2.2.1, August 12, 2015
* 2.2.2, August 19, 2015
* 2.3.0, September 22, 2015
* 2.3.1, October 2, 2015
* 2.4.0, November 14, 2015

Any fixes should be applied on the appropriate release branch and be cherry-picked to
master.

Resources
---------
@@ -4,9 +4,8 @@
import os
import sys
import warnings
import site

__version__ = '2.4.5'
__version__ = '3.0.0'

__all__ = ['__version__']
@@ -37,18 +36,6 @@ def find_commands(management_dir):
def prepare_env():
    # Update the default settings environment variable based on current mode.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
    # Add local site-packages directory to path.
    local_site_packages = os.path.join(os.path.dirname(__file__), 'lib',
                                       'site-packages')
    site.addsitedir(local_site_packages)
    try:
        index = sys.path.index(local_site_packages)
        sys.path.pop(index)
        # Work around https://bugs.python.org/issue7744
        # by moving local_site_packages to the front of sys.path
        sys.path.insert(0, local_site_packages)
    except ValueError:
        pass
    # Hide DeprecationWarnings when running in production. Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings
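The hunk above deletes the awx/lib/site-packages bootstrap, which appears consistent with 3.0.0 moving dependencies into virtualenvs (see the Makefile changes earlier in this commit). For reference, a minimal standalone sketch of the sys.path trick being removed here — the function name is illustrative:

```python
import site
import sys

def prefer_local_site_packages(path):
    # site.addsitedir() processes .pth files but *appends* to sys.path;
    # move the entry to the front so it shadows system-wide packages
    # (works around https://bugs.python.org/issue7744).
    site.addsitedir(path)
    try:
        sys.path.remove(path)
        sys.path.insert(0, path)
    except ValueError:
        pass  # path was never added (e.g. it does not exist)
```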
@@ -75,10 +62,26 @@ def prepare_env():
            settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
    # Disable capturing all SQL queries in memory when in DEBUG mode.
    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
        from django.db.backends import BaseDatabaseWrapper
        from django.db.backends.util import CursorWrapper
        from django.db.backends.base.base import BaseDatabaseWrapper
        from django.db.backends.utils import CursorWrapper
        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)

    # Use the default devserver addr/port defined in settings for runserver.
    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
    from django.core.management.commands import runserver as core_runserver
    original_handle = core_runserver.Command.handle

    def handle(self, *args, **options):
        if not options.get('addrport'):
            options['addrport'] = '%s:%d' % (default_addr, int(default_port))
        elif options.get('addrport').isdigit():
            options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
        return original_handle(self, *args, **options)

    core_runserver.Command.handle = handle


def manage():
    # Prepare the AWX environment.
    prepare_env()
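To make the monkey-patched handle() above concrete, a small standalone sketch of the address/port resolution it performs, using the defaults shown in the hunk (the helper name is illustrative):

```python
def resolve_addrport(addrport, default_addr='127.0.0.1', default_port=8000):
    # no argument         -> serve on default_addr:default_port
    # bare port ('8080')  -> keep default_addr, use the given port
    # full 'host:port'    -> pass through unchanged
    if not addrport:
        return '%s:%d' % (default_addr, default_port)
    if addrport.isdigit():
        return '%s:%d' % (default_addr, int(addrport))
    return addrport

assert resolve_addrport('') == '127.0.0.1:8000'
assert resolve_addrport('8080') == '127.0.0.1:8080'
assert resolve_addrport('0.0.0.0:80') == '0.0.0.0:80'
```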
@@ -3,10 +3,11 @@

# Python
import urllib
import logging

# Django
from django.utils.timezone import now as tz_now
from django.conf import settings
from django.utils.encoding import smart_text

# Django REST Framework
from rest_framework import authentication
@@ -15,7 +16,9 @@ from rest_framework import HTTP_HEADER_ENCODING

# AWX
from awx.main.models import UnifiedJob, AuthToken
from awx.main.conf import tower_settings

logger = logging.getLogger('awx.api.authentication')

class TokenAuthentication(authentication.TokenAuthentication):
    '''
@@ -90,11 +93,11 @@ class TokenAuthentication(authentication.TokenAuthentication):

        # Token invalidated due to session limit config being reduced
        # Session limit reached invalidation will also take place on authentication
        if settings.AUTH_TOKEN_PER_USER != -1:
        if tower_settings.AUTH_TOKEN_PER_USER != -1:
            if not token.in_valid_tokens(now=now):
                token.invalidate(reason='limit_reached')
                raise exceptions.AuthenticationFailed(AuthToken.reason_long('limit_reached'))


        # If the user is inactive, then return an error.
        if not token.user.is_active:
            raise exceptions.AuthenticationFailed('User inactive or deleted')
@@ -117,6 +120,16 @@ class TokenGetAuthentication(TokenAuthentication):
        return super(TokenGetAuthentication, self).authenticate(request)


class LoggedBasicAuthentication(authentication.BasicAuthentication):

    def authenticate(self, request):
        ret = super(LoggedBasicAuthentication, self).authenticate(request)
        if ret:
            username = ret[0].username if ret[0] else '<none>'
            logger.debug(smart_text(u"User {} performed a {} to {} through the API".format(username, request.method, request.path)))
        return ret


class TaskAuthentication(authentication.BaseAuthentication):
    '''
    Custom authentication used for views accessed by the inventory and callback
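A hypothetical client-side illustration of the token authentication these classes validate — DRF's TokenAuthentication expects the `Token` keyword in the Authorization header; the host and token value below are placeholders, not from this diff:

```python
import requests  # assumes the requests library is available

TOWER_URL = 'https://tower.example.com'  # placeholder host
token = '0123456789abcdef'               # placeholder token value

# Each request carries the Authorization header that TokenAuthentication
# above parses and validates.
resp = requests.get(
    TOWER_URL + '/api/v1/me/',
    headers={'Authorization': 'Token %s' % token},
)
print(resp.status_code)
```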
89 awx/api/fields.py Normal file
@@ -0,0 +1,89 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

# Django
from django.utils.encoding import force_text

# Django REST Framework
from rest_framework import serializers

__all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'EncryptedPasswordField', 'VerbatimField']


class NullFieldMixin(object):
    '''
    Mixin to prevent shortcutting validation when we want to allow null input,
    but coerce the resulting value to another type.
    '''

    def validate_empty_values(self, data):
        (is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
        if is_empty_value and data is None:
            return (False, data)
        return (is_empty_value, data)


class BooleanNullField(NullFieldMixin, serializers.NullBooleanField):
    '''
    Custom boolean field that allows null and empty string as False values.
    '''

    def to_internal_value(self, data):
        return bool(super(BooleanNullField, self).to_internal_value(data))


class CharNullField(NullFieldMixin, serializers.CharField):
    '''
    Custom char field that allows null as input and coerces to an empty string.
    '''

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
        super(CharNullField, self).__init__(**kwargs)

    def to_internal_value(self, data):
        return super(CharNullField, self).to_internal_value(data or u'')


class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
    '''
    Custom choice field that allows null as input and coerces to an empty string.
    '''

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
        super(ChoiceNullField, self).__init__(**kwargs)

    def to_internal_value(self, data):
        return super(ChoiceNullField, self).to_internal_value(data or u'')


class EncryptedPasswordField(CharNullField):
    '''
    Custom field to handle encrypted password values (on credentials).
    '''

    def to_internal_value(self, data):
        value = super(EncryptedPasswordField, self).to_internal_value(data or u'')
        # If user submits a value starting with $encrypted$, ignore it.
        if force_text(value).startswith('$encrypted$'):
            raise serializers.SkipField
        return value

    def to_representation(self, value):
        # Replace the actual encrypted value with the string $encrypted$.
        if force_text(value).startswith('$encrypted$'):
            return '$encrypted$'
        return value


class VerbatimField(serializers.Field):
    '''
    Custom field that passes the value through without changes.
    '''

    def to_internal_value(self, data):
        return data

    def to_representation(self, value):
        return value
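A standalone sketch (no Django/DRF required) of the null-coercion idea behind NullFieldMixin and CharNullField above; the two base classes are simplified stand-ins for the DRF originals:

```python
class BaseField(object):
    def validate_empty_values(self, data):
        # DRF returns (is_empty_value, data); an empty value normally
        # short-circuits the rest of validation.
        return (data is None, data)

class NullFieldMixin(object):
    def validate_empty_values(self, data):
        (is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
        if is_empty_value and data is None:
            return (False, data)  # keep going so to_internal_value() runs
        return (is_empty_value, data)

class CharNullField(NullFieldMixin, BaseField):
    def to_internal_value(self, data):
        return data or u''  # null (and empty) coerce to an empty string

field = CharNullField()
is_empty, value = field.validate_empty_values(None)
assert is_empty is False                    # None does not short-circuit
assert field.to_internal_value(value) == u''
```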
@@ -8,9 +8,10 @@ import re
from django.core.exceptions import FieldError, ValidationError
from django.db import models
from django.db.models import Q
from django.db.models.related import RelatedObject
from django.db.models.fields import FieldDoesNotExist
from django.db.models.fields.related import ForeignObjectRel
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_text

# Django REST Framework
from rest_framework.exceptions import ParseError
@@ -25,19 +26,6 @@ class MongoFilterBackend(BaseFilterBackend):
    def filter_queryset(self, request, queryset, view):
        return queryset

class ActiveOnlyBackend(BaseFilterBackend):
    '''
    Filter to show only objects where is_active/active is True.
    '''

    def filter_queryset(self, request, queryset, view):
        for field in queryset.model._meta.fields:
            if field.name == 'is_active':
                queryset = queryset.filter(is_active=True)
            elif field.name == 'active':
                queryset = queryset.filter(active=True)
        return queryset

class TypeFilterBackend(BaseFilterBackend):
    '''
    Filter on type field now returned with all objects.
@@ -46,7 +34,7 @@ class TypeFilterBackend(BaseFilterBackend):
    def filter_queryset(self, request, queryset, view):
        try:
            types = None
            for key, value in request.QUERY_PARAMS.items():
            for key, value in request.query_params.items():
                if key == 'type':
                    if ',' in value:
                        types = value.split(',')
@@ -70,7 +58,7 @@ class TypeFilterBackend(BaseFilterBackend):
            else:
                queryset = queryset.none()
            return queryset
        except FieldError, e:
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)

@@ -107,23 +95,21 @@ class FieldLookupBackend(BaseFilterBackend):
                'last_updated': 'last_job_run',
            }.get(name, name)

            new_parts.append(name)

            if name == 'pk':
                field = model._meta.pk
            else:
                field = model._meta.get_field_by_name(name)[0]
                if n < (len(parts) - 2):
                    if getattr(field, 'rel', None):
                        model = field.rel.to
                    else:
                        model = field.model
            new_parts.append(name)
                model = getattr(field, 'related_model', None) or field.model

        if parts:
            new_parts.append(parts[-1])
        new_lookup = '__'.join(new_parts)
        return field, new_lookup

    def to_python_related(self, value):
        value = unicode(value)
        value = force_text(value)
        if value.lower() in ('none', 'null'):
            return None
        else:
@@ -134,7 +120,7 @@ class FieldLookupBackend(BaseFilterBackend):
            return to_python_boolean(value, allow_none=True)
        elif isinstance(field, models.BooleanField):
            return to_python_boolean(value)
        elif isinstance(field, RelatedObject):
        elif isinstance(field, ForeignObjectRel):
            return self.to_python_related(value)
        else:
            return field.to_python(value)
@@ -145,13 +131,15 @@ class FieldLookupBackend(BaseFilterBackend):
            value = to_python_boolean(value)
        elif new_lookup.endswith('__in'):
            items = []
            if not value:
                raise ValueError('cannot provide empty value for __in')
            for item in value.split(','):
                items.append(self.value_to_python_for_field(field, item))
            value = items
        elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
            try:
                re.compile(value)
            except re.error, e:
            except re.error as e:
                raise ValueError(e.args[0])
        else:
            value = self.value_to_python_for_field(field, value)
@@ -159,20 +147,20 @@ class FieldLookupBackend(BaseFilterBackend):

    def filter_queryset(self, request, queryset, view):
        try:
            # Apply filters specified via QUERY_PARAMS. Each entry in the lists
            # Apply filters specified via query_params. Each entry in the lists
            # below is (negate, field, value).
            and_filters = []
            or_filters = []
            chain_filters = []
            for key, values in request.QUERY_PARAMS.lists():
            for key, values in request.query_params.lists():
                if key in self.RESERVED_NAMES:
                    continue


                # HACK: Make job event filtering by host name mostly work even
                # when not capturing job event hosts M2M.
                if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
                    key = key.replace('hosts__name', 'or__host__name')
                    or_filters.append((False, 'host__name__isnull', True))
                    or_filters.append((False, 'host__name__isnull', True))

                # Custom __int filter suffix (internal use only).
                q_int = False
@@ -231,11 +219,11 @@ class FieldLookupBackend(BaseFilterBackend):
                else:
                    q = Q(**{k:v})
                queryset = queryset.filter(q)
            queryset = queryset.filter(*args)
            return queryset.distinct()
        except (FieldError, FieldDoesNotExist, ValueError), e:
            queryset = queryset.filter(*args).distinct()
            return queryset
        except (FieldError, FieldDoesNotExist, ValueError) as e:
            raise ParseError(e.args[0])
        except ValidationError, e:
        except ValidationError as e:
            raise ParseError(e.messages)

class OrderByBackend(BaseFilterBackend):
@@ -246,7 +234,7 @@ class OrderByBackend(BaseFilterBackend):
    def filter_queryset(self, request, queryset, view):
        try:
            order_by = None
            for key, value in request.QUERY_PARAMS.items():
            for key, value in request.query_params.items():
                if key in ('order', 'order_by'):
                    order_by = value
                    if ',' in value:
@@ -273,6 +261,6 @@ class OrderByBackend(BaseFilterBackend):
                new_order_by.append(field)
            queryset = queryset.order_by(*new_order_by)
            return queryset
        except FieldError, e:
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)
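A simplified, hypothetical sketch of how FieldLookupBackend's or__/not__ key prefixes map query parameters onto Q objects — the real backend also handles chain__, the __int suffix, and field validation, so this is only the core grouping logic:

```python
from django.db.models import Q  # assumes Django is installed; Q nodes need no DB

def build_filters(params):
    # params: iterable of (key, value) pairs, as from request.query_params.lists()
    and_filters, or_filters = [], []
    for key, value in params:
        negate = key.startswith('not__')
        if negate:
            key = key[len('not__'):]
        # strip the or__ prefix for the actual lookup, but remember the grouping
        lookup = key[len('or__'):] if key.startswith('or__') else key
        q = Q(**{lookup: value})
        if negate:
            q = ~q
        (or_filters if key.startswith('or__') else and_filters).append(q)
    return and_filters, or_filters

# AND'ed terms go to qs.filter(*and_filters); OR'ed terms are reduced with |.
and_q, or_q = build_filters([('name', 'web'), ('or__status', 'failed'),
                             ('or__status', 'error'), ('not__enabled', 'true')])
```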
@@ -7,32 +7,37 @@ import logging
import time

# Django
from django.http import Http404
from django.conf import settings
from django.db import connection
from django.http import QueryDict
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
from django.contrib.contenttypes.models import ContentType

# Django REST Framework
from rest_framework.authentication import get_authorization_header
from rest_framework.exceptions import PermissionDenied
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.request import clone_request
from rest_framework import status
from rest_framework import views

# AWX
from awx.main.models import * # noqa
from awx.main.utils import * # noqa
from awx.api.serializers import ResourceAccessListElementSerializer

__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
           'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
           'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
           'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
           'MongoAPIView', 'MongoListAPIView']
           'SubDetailAPIView',
           'ResourceAccessList',
           'ParentMixin',
           'DeleteLastUnattachLabelMixin',]

logger = logging.getLogger('awx.api.generics')

@@ -144,6 +149,7 @@ class APIView(views.APIView):
            'new_in_220': getattr(self, 'new_in_220', False),
            'new_in_230': getattr(self, 'new_in_230', False),
            'new_in_240': getattr(self, 'new_in_240', False),
            'new_in_300': getattr(self, 'new_in_300', False),
        }

    def get_description(self, html=False):
@@ -154,18 +160,22 @@ class APIView(views.APIView):
        context = self.get_description_context()
        return render_to_string(template_list, context)

    def metadata(self, request):
        '''
        Add version number where view was added to Tower.
        '''
        ret = super(APIView, self).metadata(request)
        added_in_version = '1.2'
        for version in ('2.4.0', '2.3.0', '2.2.0', '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
            if getattr(self, 'new_in_%s' % version.replace('.', ''), False):
                added_in_version = version
                break
        ret['added_in_version'] = added_in_version
        return ret
    def update_raw_data(self, data):
        # Remove the parent key if the view is a sublist, since it will be set
        # automatically.
        parent_key = getattr(self, 'parent_key', None)
        if parent_key:
            data.pop(parent_key, None)

        # Use request data as-is when original request is an update and the
        # submitted data was rejected.
        request_method = getattr(self, '_raw_data_request_method', None)
        response_status = getattr(self, '_raw_data_response_status', 0)
        if request_method in ('POST', 'PUT', 'PATCH') and response_status in xrange(400, 500):
            return self.request.data.copy()

        return data


class GenericAPIView(generics.GenericAPIView, APIView):
    # Base class for all model-based views.
@@ -177,18 +187,25 @@ class GenericAPIView(generics.GenericAPIView, APIView):
    def get_serializer(self, *args, **kwargs):
        serializer = super(GenericAPIView, self).get_serializer(*args, **kwargs)
        # Override when called from browsable API to generate raw data form;
        # always remove read only fields from sample raw data.
        # update serializer "validated" data to be displayed by the raw data
        # form.
        if hasattr(self, '_raw_data_form_marker'):
            # Always remove read only fields from serializer.
            for name, field in serializer.fields.items():
                if getattr(field, 'read_only', None):
                    del serializer.fields[name]
            serializer._data = self.update_raw_data(serializer.data)
        return serializer

    def get_queryset(self):
        #if hasattr(self.request.user, 'get_queryset'):
        #    return self.request.user.get_queryset(self.model)
        #else:
        return super(GenericAPIView, self).get_queryset()
        if self.queryset is not None:
            return self.queryset._clone()
        elif self.model is not None:
            return self.model._default_manager.all()
        else:
            return super(GenericAPIView, self).get_queryset()

    def get_description_context(self):
        # Set instance attributes needed to get serializer metadata.
@@ -200,91 +217,13 @@ class GenericAPIView(generics.GenericAPIView, APIView):
        if hasattr(self.model, "_meta"):
            if hasattr(self.model._meta, "verbose_name"):
                d.update({
                    'model_verbose_name': unicode(self.model._meta.verbose_name),
                    'model_verbose_name_plural': unicode(self.model._meta.verbose_name_plural),
                    'model_verbose_name': smart_text(self.model._meta.verbose_name),
                    'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
                })
        d.update({'serializer_fields': self.get_serializer().metadata()})
        d['serializer_fields'] = self.metadata_class().get_serializer_info(self.get_serializer())
        d['settings'] = settings
        return d

    def metadata(self, request):
        '''
        Add field information for GET requests (so field names/labels are
        available even when we can't POST/PUT).
        '''
        ret = super(GenericAPIView, self).metadata(request)
        actions = ret.get('actions', {})
        # Remove read only fields from PUT/POST data.
        for method in ('POST', 'PUT'):
            fields = actions.get(method, {})
            for field, meta in fields.items():
                if not isinstance(meta, dict):
                    continue
                if meta.pop('read_only', False):
                    fields.pop(field)
        if 'GET' in self.allowed_methods:
            cloned_request = clone_request(request, 'GET')
            try:
                # Test global permissions
                self.check_permissions(cloned_request)
                # Test object permissions
                if hasattr(self, 'retrieve'):
                    try:
                        self.get_object()
                    except Http404:
                        # Http404 should be acceptable and the serializer
                        # metadata should be populated. Except this so the
                        # outer "else" clause of the try-except-else block
                        # will be executed.
                        pass
            except (exceptions.APIException, PermissionDenied):
                pass
            else:
                # If user has appropriate permissions for the view, include
                # appropriate metadata about the fields that should be supplied.
                serializer = self.get_serializer()
                actions['GET'] = serializer.metadata()
                if hasattr(serializer, 'get_types'):
                    ret['types'] = serializer.get_types()
                # Remove fields labeled as write_only, remove field attributes
                # that aren't relevant for retrieving data.
                for field, meta in actions['GET'].items():
                    if not isinstance(meta, dict):
                        continue
                    meta.pop('required', None)
                    meta.pop('read_only', None)
                    meta.pop('default', None)
                    meta.pop('min_length', None)
                    meta.pop('max_length', None)
                    if meta.pop('write_only', False):
                        actions['GET'].pop(field)
        if actions:
            ret['actions'] = actions
        if getattr(self, 'search_fields', None):
            ret['search_fields'] = self.search_fields
        return ret

class MongoAPIView(GenericAPIView):

    def get_parent_object(self):
        parent_filter = {
            self.lookup_field: self.kwargs.get(self.lookup_field, None),
        }
        return get_object_or_404(self.parent_model, **parent_filter)

    def check_parent_access(self, parent=None):
        parent = parent or self.get_parent_object()
        parent_access = getattr(self, 'parent_access', 'read')
        if parent_access in ('read', 'delete'):
            args = (self.parent_model, parent_access, parent)
        else:
            args = (self.parent_model, parent_access, parent, None)
        if not self.request.user.can_access(*args):
            raise PermissionDenied()

class MongoListAPIView(generics.ListAPIView, MongoAPIView):
    pass

class SimpleListAPIView(generics.ListAPIView, GenericAPIView):

    def get_queryset(self):
@@ -321,25 +260,7 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
    # Base class for a list view that allows creating new objects.
    pass

class SubListAPIView(ListAPIView):
    # Base class for a read-only sublist view.

    # Subclasses should define at least:
    #   model = ModelClass
    #   serializer_class = SerializerClass
    #   parent_model = ModelClass
    #   relationship = 'rel_name_from_parent_to_model'
    # And optionally (user must have given access permission on parent object
    # to view sublist):
    #   parent_access = 'read'

    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
        d.update({
            'parent_model_verbose_name': unicode(self.parent_model._meta.verbose_name),
            'parent_model_verbose_name_plural': unicode(self.parent_model._meta.verbose_name_plural),
        })
        return d
class ParentMixin(object):

    def get_parent_object(self):
        parent_filter = {
@@ -357,11 +278,31 @@ class SubListAPIView(ListAPIView):
        if not self.request.user.can_access(*args):
            raise PermissionDenied()

class SubListAPIView(ListAPIView, ParentMixin):
    # Base class for a read-only sublist view.

    # Subclasses should define at least:
    #   model = ModelClass
    #   serializer_class = SerializerClass
    #   parent_model = ModelClass
    #   relationship = 'rel_name_from_parent_to_model'
    # And optionally (user must have given access permission on parent object
    # to view sublist):
    #   parent_access = 'read'

    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
        d.update({
            'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
            'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
        })
        return d

    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        qs = self.request.user.get_queryset(self.model).distinct()
        sublist_qs = getattr(parent, self.relationship).distinct()
        sublist_qs = getattrd(parent, self.relationship).distinct()
        return qs & sublist_qs

class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
@@ -387,10 +328,11 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):

        # Make a copy of the data provided (since it's readonly) in order to
        # inject additional data.
        if hasattr(request.DATA, 'dict'):
            data = request.DATA.dict()
        if hasattr(request.data, 'copy'):
            data = request.data.copy()
        else:
            data = request.DATA
            data = QueryDict('')
            data.update(request.data)

        # add the parent key to the post data using the pk from the URL
        parent_key = getattr(self, 'parent_key', None)
@@ -404,14 +346,14 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
                            status=status.HTTP_400_BAD_REQUEST)

        # Verify we have permission to add the object as given.
        if not request.user.can_access(self.model, 'add', serializer.init_data):
        if not request.user.can_access(self.model, 'add', serializer.initial_data):
            raise PermissionDenied()

        # save the object through the serializer, reload and return the saved
        # object deserialized
        obj = serializer.save()
        serializer = self.get_serializer(instance=obj)

        headers = {'Location': obj.get_absolute_url()}
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

@@ -419,12 +361,19 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
    # Base class for a sublist view that allows for creating subobjects and
    # attaching/detaching them from the parent.

    def get_description_context(self):
        d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
        d.update({
            "has_attach": True,
        })
        return d

    def attach(self, request, *args, **kwargs):
        created = False
        parent = self.get_parent_object()
        relationship = getattr(parent, self.relationship)
        sub_id = request.DATA.get('id', None)
        data = request.DATA
        relationship = getattrd(parent, self.relationship)
        sub_id = request.data.get('id', None)
        data = request.data

        # Create the sub object if an ID is not provided.
        if not sub_id:
@@ -441,7 +390,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

        # Retrieve the sub object (whether created or by ID).
        sub = get_object_or_400(self.model, pk=sub_id)

        # Verify we have permission to attach.
        if not request.user.can_access(self.parent_model, 'attach', parent, sub,
                                       self.relationship, data,
@@ -460,51 +409,82 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        else:
            return Response(status=status.HTTP_204_NO_CONTENT)

    def unattach(self, request, *args, **kwargs):
        sub_id = request.DATA.get('id', None)
    def unattach_validate(self, request):
        sub_id = request.data.get('id', None)
        res = None
        if not sub_id:
            data = dict(msg='"id" is required to disassociate')
            return Response(data, status=status.HTTP_400_BAD_REQUEST)
            res = Response(data, status=status.HTTP_400_BAD_REQUEST)
|
||||
return (sub_id, res)
|
||||
|
||||
def unattach_by_id(self, request, sub_id):
|
||||
parent = self.get_parent_object()
|
||||
parent_key = getattr(self, 'parent_key', None)
|
||||
relationship = getattr(parent, self.relationship)
|
||||
relationship = getattrd(parent, self.relationship)
|
||||
sub = get_object_or_400(self.model, pk=sub_id)
|
||||
|
||||
if not request.user.can_access(self.parent_model, 'unattach', parent,
|
||||
sub, self.relationship):
|
||||
sub, self.relationship, request.data):
|
||||
raise PermissionDenied()
|
||||
|
||||
if parent_key:
|
||||
# sub object has a ForeignKey to the parent, so we can't remove it
|
||||
# from the set, only mark it as inactive.
|
||||
sub.mark_inactive()
|
||||
sub.delete()
|
||||
else:
|
||||
relationship.remove(sub)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def unattach(self, request, *args, **kwargs):
|
||||
(sub_id, res) = self.unattach_validate(request)
|
||||
if res:
|
||||
return res
|
||||
return self.unattach_by_id(request, sub_id)
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
if not isinstance(request.DATA, dict):
|
||||
if not isinstance(request.data, dict):
|
||||
return Response('invalid type for post data',
|
||||
status=status.HTTP_400_BAD_REQUEST)
|
||||
if 'disassociate' in request.DATA:
|
||||
if 'disassociate' in request.data:
|
||||
return self.unattach(request, *args, **kwargs)
|
||||
else:
|
||||
return self.attach(request, *args, **kwargs)
|
||||
|
||||
'''
|
||||
Models for which you want the last instance to be deleted from the database
|
||||
when the last disassociate is called should inherit from this class. Further,
|
||||
the model should implement is_detached()
|
||||
'''
|
||||
class DeleteLastUnattachLabelMixin(object):
|
||||
def unattach(self, request, *args, **kwargs):
|
||||
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
|
||||
if res:
|
||||
return res
|
||||
|
||||
res = super(DeleteLastUnattachLabelMixin, self).unattach_by_id(request, sub_id)
|
||||
|
||||
obj = self.model.objects.get(id=sub_id)
|
||||
|
||||
if obj.is_detached():
|
||||
obj.delete()
|
||||
|
||||
return res
|
||||
|
||||
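As a rough illustration of the contract described in the docstring above, a model used with DeleteLastUnattachLabelMixin might implement is_detached() along these lines (a hypothetical sketch, not part of this diff; the model and relation names are assumptions):

    # Hypothetical sketch: a minimal model satisfying the is_detached() contract.
    from django.db import models

    class Label(models.Model):
        name = models.CharField(max_length=512)
        # Assumes reverse M2M relations from jobs and job templates exist.

        def is_detached(self):
            # The mixin deletes the label once nothing references it any more.
            return not (self.unifiedjob_labels.exists() or
                        self.unifiedjobtemplate_labels.exists())
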
class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin):
    pass


class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
    pass


class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):

    def pre_save(self, obj):
        super(RetrieveUpdateAPIView, self).pre_save(obj)

    def update(self, request, *args, **kwargs):
        self.update_filter(request, *args, **kwargs)
        return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)

    def partial_update(self, request, *args, **kwargs):
        self.update_filter(request, *args, **kwargs)
        return super(RetrieveUpdateAPIView, self).partial_update(request, *args, **kwargs)

    def update_filter(self, request, *args, **kwargs):
        ''' scrub any fields the user cannot/should not put/patch, based on user context. This runs after read-only serialization filtering '''
        pass

@@ -514,17 +494,9 @@ class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
    def destroy(self, request, *args, **kwargs):
        # somewhat lame that delete has to call its own permissions check
        obj = self.get_object()
        # FIXME: Why isn't the active check being caught earlier by RBAC?
        if not getattr(obj, 'active', True):
            raise Http404()
        if not getattr(obj, 'is_active', True):
            raise Http404()
        if not request.user.can_access(self.model, 'delete', obj):
            raise PermissionDenied()
        if hasattr(obj, 'mark_inactive'):
            obj.mark_inactive()
        else:
            raise NotImplementedError('destroy() not implemented yet for %s' % obj)
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):

@@ -532,3 +504,21 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView

class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
    pass


class ResourceAccessList(ListAPIView):

    serializer_class = ResourceAccessListElementSerializer

    def get_queryset(self):
        self.object_id = self.kwargs['pk']
        resource_model = getattr(self, 'resource_model')
        obj = get_object_or_404(resource_model, pk=self.object_id)

        content_type = ContentType.objects.get_for_model(obj)
        roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))

        ancestors = set()
        for r in roles:
            ancestors.update(set(r.ancestors.all()))
        return User.objects.filter(roles__in=list(ancestors)).distinct()

@@ -11,19 +11,21 @@ class LicenseForbids(APIException):
    default_detail = 'Your Tower license does not allow that.'


def get_license(show_key=False):
def get_license(show_key=False, bypass_database=False):
    """Return a dictionary representing the license currently in
    place on this Tower instance.
    """
    license_reader = TaskSerializer()
    return license_reader.from_file(show_key=show_key)
    if bypass_database:
        return license_reader.from_file(show_key=show_key)
    return license_reader.from_database(show_key=show_key)


def feature_enabled(name):
def feature_enabled(name, bypass_database=False):
    """Return True if the requested feature is enabled, False otherwise.
    If the feature does not exist, raise KeyError.
    """
    license = get_license()
    license = get_license(bypass_database=bypass_database)

    # Sanity check: If there is no license, the feature is considered
    # to be off.

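For context, a caller might use the new bypass_database flag like this (a minimal sketch, assuming these helpers are importable as shown; the module path is an assumption, not confirmed by the diff):

    # Illustrative usage only.
    from awx.api.license import feature_enabled, get_license

    # Normal path: read the license record stored in the database.
    license_info = get_license()

    # Early in start-up the database may not be ready yet, so a caller
    # can fall back to reading the license file on disk instead.
    if feature_enabled('system_tracking', bypass_database=True):
        print('System tracking is licensed.')
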
@@ -28,7 +28,7 @@ class Command(BaseCommand):
    def handle(self, *args, **kwargs):
        # Get the license data.
        license_reader = TaskSerializer()
        license_data = license_reader.from_file()
        license_data = license_reader.from_database()

        # Does the license have features, at all?
        # If there is no license yet, then all features are clearly off.

awx/api/metadata.py (new file, 190 lines)
@@ -0,0 +1,190 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

from collections import OrderedDict

# Django
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.encoding import force_text, smart_text

# Django REST Framework
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework import serializers
from rest_framework.relations import RelatedField
from rest_framework.request import clone_request

# Ansible Tower
from awx.main.models import InventorySource, NotificationTemplate


class Metadata(metadata.SimpleMetadata):

    def get_field_info(self, field):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)

        text_attrs = [
            'read_only', 'label', 'help_text',
            'min_length', 'max_length',
            'min_value', 'max_value'
        ]

        for attr in text_attrs:
            value = getattr(field, attr, None)
            if value is not None and value != '':
                field_info[attr] = force_text(value, strings_only=True)

        # Update help text for common fields.
        serializer = getattr(field, 'parent', None)
        if serializer:
            field_help_text = {
                'id': 'Database ID for this {}.',
                'name': 'Name of this {}.',
                'description': 'Optional description of this {}.',
                'type': 'Data type for this {}.',
                'url': 'URL for this {}.',
                'related': 'Data structure with URLs of related resources.',
                'summary_fields': 'Data structure with name/description for related resources.',
                'created': 'Timestamp when this {} was created.',
                'modified': 'Timestamp when this {} was last modified.',
            }
            if field.field_name in field_help_text:
                opts = serializer.Meta.model._meta.concrete_model._meta
                verbose_name = smart_text(opts.verbose_name)
                field_info['help_text'] = field_help_text[field.field_name].format(verbose_name)

        # Indicate if a field has a default value.
        # FIXME: Still isn't showing all default values?
        try:
            field_info['default'] = field.get_default()
        except serializers.SkipField:
            pass

        if getattr(field, 'child', None):
            field_info['child'] = self.get_field_info(field.child)
        elif getattr(field, 'fields', None):
            field_info['children'] = self.get_serializer_info(field)

        if hasattr(field, 'choices') and not isinstance(field, RelatedField):
            field_info['choices'] = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]

        # Indicate if a field is write-only.
        if getattr(field, 'write_only', False):
            field_info['write_only'] = True

        # Special handling of inventory source_region choices that vary based on
        # selected inventory source.
        if field.field_name == 'source_regions':
            for cp in ('azure', 'ec2', 'gce', 'rax'):
                get_regions = getattr(InventorySource, 'get_%s_region_choices' % cp)
                field_info['%s_region_choices' % cp] = get_regions()

        # Special handling of group_by choices for EC2.
        if field.field_name == 'group_by':
            for cp in ('ec2',):
                get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp)
                field_info['%s_group_by_choices' % cp] = get_group_by_choices()

        # Special handling of notification configuration where the required properties
        # are conditional on the type selected.
        if field.field_name == 'notification_configuration':
            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                field_info[notification_type_name] = notification_type_class.init_parameters

        # Update type of fields returned...
        if field.field_name == 'type':
            field_info['type'] = 'choice'
        elif field.field_name == 'url':
            field_info['type'] = 'string'
        elif field.field_name in ('related', 'summary_fields'):
            field_info['type'] = 'object'
        elif field.field_name in ('created', 'modified'):
            field_info['type'] = 'datetime'

        return field_info

    def determine_actions(self, request, view):
        # Add field information for GET requests (so field names/labels are
        # available even when we can't POST/PUT).
        actions = {}
        for method in {'GET', 'PUT', 'POST'} & set(view.allowed_methods):
            view.request = clone_request(request, method)
            try:
                # Test global permissions
                if hasattr(view, 'check_permissions'):
                    view.check_permissions(view.request)
                # Test object permissions
                if method == 'PUT' and hasattr(view, 'get_object'):
                    view.get_object()
            except (exceptions.APIException, PermissionDenied, Http404):
                continue
            else:
                # If user has appropriate permissions for the view, include
                # appropriate metadata about the fields that should be supplied.
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            finally:
                view.request = request

            for field, meta in actions[method].items():
                if not isinstance(meta, dict):
                    continue

                # Add type choices if available from the serializer.
                if field == 'type' and hasattr(serializer, 'get_type_choices'):
                    meta['choices'] = serializer.get_type_choices()

                # For GET method, remove meta attributes that aren't relevant
                # when reading a field and remove write-only fields.
                if method == 'GET':
                    meta.pop('required', None)
                    meta.pop('read_only', None)
                    meta.pop('default', None)
                    meta.pop('min_length', None)
                    meta.pop('max_length', None)
                    if meta.pop('write_only', False):
                        actions['GET'].pop(field)

                # For PUT/POST methods, remove read-only fields.
                if method in ('PUT', 'POST'):
                    if meta.pop('read_only', False):
                        actions[method].pop(field)

        return actions

    def determine_metadata(self, request, view):
        metadata = super(Metadata, self).determine_metadata(request, view)

        # Add version number in which view was added to Tower.
        added_in_version = '1.2'
        for version in ('3.0.0', '2.4.0', '2.3.0', '2.2.0', '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
            if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
                added_in_version = version
                break
        metadata['added_in_version'] = added_in_version

        # Add type(s) handled by this view/serializer.
        if hasattr(view, 'get_serializer'):
            serializer = view.get_serializer()
            if hasattr(serializer, 'get_types'):
                metadata['types'] = serializer.get_types()

        # Add search fields if available from the view.
        if getattr(view, 'search_fields', None):
            metadata['search_fields'] = view.search_fields

        return metadata


class RoleMetadata(Metadata):
    def determine_metadata(self, request, view):
        metadata = super(RoleMetadata, self).determine_metadata(request, view)
        if 'actions' in metadata:
            metadata['actions'].pop('POST')
            metadata['actions']['POST'] = {
                "id": {"type": "integer", "label": "ID", "help_text": "Database ID for this role."},
                "disassociate": {"type": "integer", "label": "Disassociate", "help_text": "Provide to remove this role."},
            }
        return metadata

@@ -2,36 +2,24 @@
# All Rights Reserved.

# Django REST Framework
from rest_framework import serializers, pagination
from rest_framework.templatetags.rest_framework import replace_query_param
from rest_framework import pagination
from rest_framework.utils.urls import replace_query_param


class NextPageField(pagination.NextPageField):
    '''Pagination field to output URL path.'''

    def to_native(self, value):
        if not value.has_next():


class Pagination(pagination.PageNumberPagination):

    page_size_query_param = 'page_size'

    def get_next_link(self):
        if not self.page.has_next():
            return None
        page = value.next_page_number()
        request = self.context.get('request')
        url = request and request.get_full_path() or ''
        return replace_query_param(url, self.page_field, page)
        url = self.request and self.request.get_full_path() or ''
        page_number = self.page.next_page_number()
        return replace_query_param(url, self.page_query_param, page_number)


class PreviousPageField(pagination.NextPageField):
    '''Pagination field to output URL path.'''

    def to_native(self, value):
        if not value.has_previous():
    def get_previous_link(self):
        if not self.page.has_previous():
            return None
        page = value.previous_page_number()
        request = self.context.get('request')
        url = request and request.get_full_path() or ''
        return replace_query_param(url, self.page_field, page)


class PaginationSerializer(pagination.BasePaginationSerializer):
    '''
    Custom pagination serializer to output only URL path (without host/port).
    '''

    count = serializers.Field(source='paginator.count')
    next = NextPageField(source='*')
    previous = PreviousPageField(source='*')
        url = self.request and self.request.get_full_path() or ''
        page_number = self.page.previous_page_number()
        return replace_query_param(url, self.page_query_param, page_number)

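To see what the link builders above produce, here is a small standalone check (illustrative only; it uses just the DRF utility imported at the top of this file):

    # replace_query_param swaps one query parameter while keeping the rest.
    from rest_framework.utils.urls import replace_query_param

    url = '/api/v1/jobs/?page=2&page_size=50'
    # Note: the utility re-encodes the query string, so the parameter
    # ordering of the output is not guaranteed.
    print(replace_query_param(url, 'page', 3))  # e.g. /api/v1/jobs/?page=3&page_size=50
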
awx/api/parsers.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Python
from collections import OrderedDict
import json

# Django
from django.conf import settings
from django.utils import six

# Django REST Framework
from rest_framework import parsers
from rest_framework.exceptions import ParseError


class JSONParser(parsers.JSONParser):
    """
    Parses JSON-serialized data, preserving order of dictionary keys.
    """

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as JSON and returns the resulting data.
        """
        parser_context = parser_context or {}
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

        try:
            data = stream.read().decode(encoding)
            return json.loads(data, object_pairs_hook=OrderedDict)
        except ValueError as exc:
            raise ParseError('JSON parse error - %s' % six.text_type(exc))

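The object_pairs_hook argument is what preserves key order; a quick standalone illustration (not part of the diff):

    import json
    from collections import OrderedDict

    raw = '{"b": 1, "a": 2, "c": 3}'
    # With the hook, keys come back in document order.
    parsed = json.loads(raw, object_pairs_hook=OrderedDict)
    print(list(parsed.keys()))  # ['b', 'a', 'c']
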
@@ -19,7 +19,7 @@ from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions')

__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission',
           'TaskPermission']
           'TaskPermission', 'ProjectUpdatePermission']


class ModelAccessPermission(permissions.BasePermission):
    '''

@@ -61,7 +61,7 @@ class ModelAccessPermission(permissions.BasePermission):
        else:
            if obj:
                return True
            return check_user_access(request.user, view.model, 'add', request.DATA)
            return check_user_access(request.user, view.model, 'add', request.data)

    def check_put_permissions(self, request, view, obj=None):
        if not obj:

@@ -70,10 +70,10 @@ class ModelAccessPermission(permissions.BasePermission):
            return True
        if getattr(view, 'is_variable_data', False):
            return check_user_access(request.user, view.model, 'change', obj,
                                     dict(variables=request.DATA))
                                     dict(variables=request.data))
        else:
            return check_user_access(request.user, view.model, 'change', obj,
                                     request.DATA)
                                     request.data)

    def check_patch_permissions(self, request, view, obj=None):
        return self.check_put_permissions(request, view, obj)

@@ -103,11 +103,7 @@ class ModelAccessPermission(permissions.BasePermission):
        if not request.user or request.user.is_anonymous():
            return False

        # Don't allow inactive users (and respond with a 403).
        if not request.user.is_active:
            raise PermissionDenied('your account is inactive')

        # Always allow superusers (as long as they are active).
        # Always allow superusers
        if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser:
            return True

@@ -127,11 +123,11 @@ class ModelAccessPermission(permissions.BasePermission):

    def has_permission(self, request, view, obj=None):
        logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)',
                     request.user, request.method, request.DATA,
                     request.user, request.method, request.data,
                     view.__class__.__name__, obj)
        try:
            response = self.check_permissions(request, view, obj)
        except Exception, e:
        except Exception as e:
            logger.debug('has_permission raised %r', e, exc_info=True)
            raise
        else:

@@ -156,13 +152,11 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
        # Require method to be POST, host_config_key to be specified and match
        # the requested job template, and require the job template to be
        # active in order to proceed.
        host_config_key = request.DATA.get('host_config_key', '')
        host_config_key = request.data.get('host_config_key', '')
        if request.method.lower() != 'post':
            raise PermissionDenied()
        elif not host_config_key:
            raise PermissionDenied()
        elif obj and not obj.active:
            raise PermissionDenied()
        elif obj and obj.host_config_key != host_config_key:
            raise PermissionDenied()
        else:

@@ -182,7 +176,7 @@ class TaskPermission(ModelAccessPermission):
        # Verify that the ID present in the auth token is for a valid, active
        # unified job.
        try:
            unified_job = UnifiedJob.objects.get(active=True, status='running',
            unified_job = UnifiedJob.objects.get(status='running',
                                                 pk=int(request.auth.split('-')[0]))
        except (UnifiedJob.DoesNotExist, TypeError):
            return False

@@ -196,3 +190,15 @@ class TaskPermission(ModelAccessPermission):
            return bool(not obj or obj.pk == unified_job.pk)
        else:
            return False


class ProjectUpdatePermission(ModelAccessPermission):
    '''
    Permission check used by ProjectUpdateView to determine who can update projects
    '''
    def check_get_permissions(self, request, view, obj=None):
        project = get_object_or_400(view.model, pk=view.kwargs['pk'])
        return check_user_access(request.user, view.model, 'read', project)

    def check_post_permissions(self, request, view, obj=None):
        project = get_object_or_400(view.model, pk=view.kwargs['pk'])
        return check_user_access(request.user, view.model, 'start', project)

@@ -4,6 +4,7 @@
# Django REST Framework
from rest_framework import renderers


class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
    '''
    Customizations to the default browsable API renderer.

@@ -16,24 +17,39 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
            return renderers.JSONRenderer()
        return renderer

    def get_raw_data_form(self, view, method, request):
    def get_context(self, data, accepted_media_type, renderer_context):
        # Store the associated response status to know how to populate the raw
        # data form.
        try:
            setattr(renderer_context['view'], '_raw_data_response_status', renderer_context['response'].status_code)
            return super(BrowsableAPIRenderer, self).get_context(data, accepted_media_type, renderer_context)
        finally:
            delattr(renderer_context['view'], '_raw_data_response_status')

    def get_raw_data_form(self, data, view, method, request):
        # Set a flag on the view to indicate to the view/serializer that we're
        # creating a raw data form for the browsable API. Store the original
        # request method to determine how to populate the raw data form.
        try:
            setattr(view, '_raw_data_form_marker', True)
            return super(BrowsableAPIRenderer, self).get_raw_data_form(view, method, request)
            setattr(view, '_raw_data_request_method', request.method)
            return super(BrowsableAPIRenderer, self).get_raw_data_form(data, view, method, request)
        finally:
            delattr(view, '_raw_data_form_marker')
            delattr(view, '_raw_data_request_method')

    def get_rendered_html_form(self, view, method, request):
        '''Never show auto-generated form (only raw form).'''
    def get_rendered_html_form(self, data, view, method, request):
        # Never show auto-generated form (only raw form).
        obj = getattr(view, 'object', None)
        if not self.show_form_for_method(view, method, request, obj):
            return
        if method in ('DELETE', 'OPTIONS'):
            return True  # Don't actually need to return a form

    def get_context(self, data, accepted_media_type, renderer_context):
        context = super(BrowsableAPIRenderer, self).get_context(data, accepted_media_type, renderer_context)
        return context
    def get_filter_form(self, data, view, request):
        # Don't show filter form in browsable API.
        return


class PlainTextRenderer(renderers.BaseRenderer):

@@ -45,9 +61,12 @@ class PlainTextRenderer(renderers.BaseRenderer):
        data = unicode(data)
        return data.encode(self.charset)


class DownloadTextRenderer(PlainTextRenderer):

    format = "txt_download"


class AnsiTextRenderer(PlainTextRenderer):

    media_type = 'text/plain'

File diff suppressed because it is too large.
@@ -5,4 +5,5 @@
{% if new_in_200 %}> _New in Ansible Tower 2.0.0_{% endif %}
{% if new_in_220 %}> _New in Ansible Tower 2.2.0_{% endif %}
{% if new_in_230 %}> _New in Ansible Tower 2.3.0_{% endif %}
{% if new_in_240 %}> _New in Ansible Tower 2.4.0_{% endif %}
{% if new_in_240 %}> _New in Ansible Tower 2.4.0_{% endif %}
{% if new_in_300 %}> _New in Ansible Tower 3.0.0_{% endif %}

@@ -1,6 +1,7 @@
{% for fn, fm in serializer_fields.items %}{% spaceless %}
{% if not write_only or not fm.read_only %}
* `{{ fn }}`: {{ fm.help_text|capfirst }} ({{ fm.type }}{% if write_only and fm.required %}, required{% endif %}{% if write_only and fm.read_only %}, read-only{% endif %}{% if write_only and not fm.choices and not fm.required %}, default=`{% if fm.type == "string" or fm.type == "email" %}"{% firstof fm.default "" %}"{% else %}{{ fm.default }}{% endif %}`{% endif %}){% if fm.choices %}{% for c in fm.choices %}
{% if write_only and fm.read_only or not write_only and fm.write_only or write_only and fn == parent_key %}
{% else %}
* `{{ fn }}`: {{ fm.help_text|capfirst }} ({{ fm.type }}{% if write_only and fm.required %}, required{% endif %}{% if write_only and fm.read_only %}, read-only{% endif %}{% if write_only and not fm.choices and not fm.required %}, default=`{% if fm.type == "string" or fm.type == "email" %}"{% firstof fm.default "" %}"{% else %}{% if fm.type == "field" and not fm.default %}None{% else %}{{ fm.default }}{% endif %}{% endif %}`{% endif %}){% if fm.choices %}{% for c in fm.choices %}
    - `{% if c.0 == "" %}""{% else %}{{ c.0 }}{% endif %}`{% if c.1 != c.0 %}: {{ c.1 }}{% endif %}{% if write_only and c.0 == fm.default %} (default){% endif %}{% endfor %}{% endif %}{% endif %}
{% endspaceless %}
{% endfor %}

awx/api/templates/api/_schedule_detail.md (new file, 28 lines)
@@ -0,0 +1,28 @@
The following lists the expected format and details of our rrules:

* DTSTART is required and must follow the following format: DTSTART:YYYYMMDDTHHMMSSZ
* DTSTART is expected to be in UTC
* INTERVAL is required
* SECONDLY is not supported
* TZID is not supported
* RRULE must precede the rule statements
* BYDAY is supported but not BYDAY with a numerical prefix
* BYYEARDAY and BYWEEKNO are not supported
* Only one rrule statement per schedule is supported
* COUNT must be < 1000

Here are some example rrules:

    "DTSTART:20500331T055000Z RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5"
    "DTSTART:20240331T075000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1"
    "DTSTART:20140331T075000Z RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20230401T075000Z"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=5;BYDAY=MO"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=6"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=4;BYDAY=SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=-1;BYDAY=MO,TU,WE,TH,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=-1;BYDAY=MO,TU,WE,TH,FR,SA,SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=4;BYMONTHDAY=1"
    "DTSTART:20140331T075000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYSETPOS=-1;BYMONTH=8;BYDAY=SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20230401T075000Z;BYDAY=MO,WE,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20230610T075000Z"

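One way to sanity-check a candidate rrule before POSTing it is to parse it with python-dateutil (an illustrative sketch; dateutil is an assumption here, not referenced by the diff, and it expects DTSTART and RRULE on separate lines, while Tower performs its own validation):

    from itertools import islice
    from dateutil.rrule import rrulestr

    spec = "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,FR"
    # dateutil wants DTSTART and RRULE newline-separated.
    rule = rrulestr(spec.replace(' RRULE:', '\nRRULE:'))

    # Print the first three occurrences the rule would generate.
    for occurrence in islice(rule, 3):
        print(occurrence)
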
@@ -1,30 +1,5 @@

POST requests to this resource must include a proper `rrule` value following
a particular format and conforming to the following rules:
a particular format and conforming to a subset of allowed rules.

* DTSTART is required and must follow the following format: DTSTART:YYYYMMDDTHHMMSSZ
* DTSTART is expected to be in UTC
* INTERVAL is required
* SECONDLY is not supported
* TZID is not supported
* RRULE must precede the rule statements
* BYDAY is supported but not BYDAY with a numerical prefix
* BYYEARDAY and BYWEEKNO are not supported
* Only one rrule statement per schedule is supported
* COUNT must be < 1000

Here are some example rrules:

    "DTSTART:20500331T055000Z RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5"
    "DTSTART:20240331T075000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1"
    "DTSTART:20140331T075000Z RRULE:FREQ=MINUTELY;INTERVAL=1;UNTIL=20230401T075000Z"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=5;BYDAY=MO"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=6"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=4;BYDAY=SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=-1;BYDAY=MO,TU,WE,TH,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYSETPOS=-1;BYDAY=MO,TU,WE,TH,FR,SA,SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=4;BYMONTHDAY=1"
    "DTSTART:20140331T075000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYSETPOS=-1;BYMONTH=8;BYDAY=SU"
    "DTSTART:20140331T075000Z RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20230401T075000Z;BYDAY=MO,WE,FR"
    "DTSTART:20140331T075000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20230610T075000Z"
{% include "api/_schedule_detail.md" %}

awx/api/templates/api/job_template_label_list.md (new file, 9 lines)
@@ -0,0 +1,9 @@
{% include "api/sub_list_create_api_view.md" %}

Labels not associated with any other resources are deleted. A label can become disassociated from a resource as a result of 3 events:

1. A label is explicitly disassociated from a related job template
2. A job is deleted with labels
3. A cleanup job deletes a job with labels

{% include "api/_new_in_awx.md" %}

@@ -6,24 +6,40 @@ The response will include the following fields:

* `ask_variables_on_launch`: Flag indicating whether the job_template is
  configured to prompt for variables upon launch (boolean, read-only)
* `ask_tags_on_launch`: Flag indicating whether the job_template is
  configured to prompt for tags upon launch (boolean, read-only)
* `ask_job_type_on_launch`: Flag indicating whether the job_template is
  configured to prompt for job_type upon launch (boolean, read-only)
* `ask_limit_on_launch`: Flag indicating whether the job_template is
  configured to prompt for limit upon launch (boolean, read-only)
* `ask_inventory_on_launch`: Flag indicating whether the job_template is
  configured to prompt for inventory upon launch (boolean, read-only)
* `ask_credential_on_launch`: Flag indicating whether the job_template is
  configured to prompt for credential upon launch (boolean, read-only)
* `can_start_without_user_input`: Flag indicating if the job_template can be
  launched without user-input (boolean, read-only)
* `passwords_needed_to_start`: Password names required to launch the
  job_template (array, read-only)
* `variables_needed_to_start`: Required variable names required to launch the
  job_template (array, read-only)
* `survey_enabled`: Flag indicating if whether the job_template has an enabled
* `survey_enabled`: Flag indicating whether the job_template has an enabled
  survey (boolean, read-only)
* `credential_needed_to_start`: Flag indicating the presence of a credential
  associated with the job template. If not then one should be supplied when
  launching the job (boolean, read-only)
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
  associated with the job template. If not then one should be supplied when
  launching the job (boolean, read-only)

Make a POST request to this resource to launch the job_template. If any
passwords or extra variables (extra_vars) are required, they must be passed
via POST data, with extra_vars given as a YAML or JSON string and escaped
parentheses. If `credential_needed_to_start` is `True` then the `credential`
field is required as well.
passwords, inventory, or extra variables (extra_vars) are required, they must
be passed via POST data, with extra_vars given as a YAML or JSON string and
escaped parentheses. If `credential_needed_to_start` is `True` then the
`credential` field is required and if the `inventory_needed_to_start` is
`True` then the `inventory` is required as well.

If successful, the response status code will be 202. If any required passwords
If successful, the response status code will be 201. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
launched, a 405 status code will be returned.
launched, a 405 status code will be returned. If the provided credential or
inventory are not allowed to be used by the user, then a 403 status code will
be returned.

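Putting those fields together, a launch request might look like the following sketch (hypothetical host, IDs, and credentials; the JSON fields mirror the documentation above):

    import json
    import requests

    resp = requests.post(
        'https://tower.example.com/api/v1/job_templates/42/launch/',
        auth=('admin', 'password'),
        headers={'Content-Type': 'application/json'},
        data=json.dumps({
            'credential': 5,                       # if credential_needed_to_start
            'inventory': 3,                        # if inventory_needed_to_start
            'extra_vars': '{"ansible_port": 22}',  # YAML or JSON string
        }),
    )
    print(resp.status_code)  # 201 on success, per the docs above
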
@@ -3,7 +3,7 @@ POST requests to this resource should include the full specification for a Job T
Here is an example survey specification:

    {
        "name": "Simple Surveny",
        "name": "Simple Survey",
        "description": "Description of the simple survey",
        "spec": [
            {

@@ -23,6 +23,7 @@ list of survey items.
Within each survey item `type` must be one of:

* text: For survey questions expecting a textual answer
* password: For survey questions expecting a password or other sensitive information
* integer: For survey questions expecting a whole number answer
* float: For survey questions expecting a decimal number
* multiplechoice: For survey questions where one option from a list is required

@@ -116,4 +117,4 @@ Here is a more comprehensive example showing the various question types and thei
                "default": ""
            }
        ]
    }
    }

awx/api/templates/api/schedule_list.md (new file, 7 lines)
@@ -0,0 +1,7 @@
{% include "api/list_api_view.md" %}

Schedule Details
================
{% include "api/_schedule_detail.md" %}

@@ -13,22 +13,22 @@
.ansi3 { font-weight: italic; }
.ansi4 { text-decoration: underline; }
.ansi9 { text-decoration: line-through; }
.ansi30 { color: #000316; }
.ansi31 { color: #ff5850; }
.ansi32 { color: #60D66F; }
.ansi33 { color: #AA5500; }
.ansi34 { color: #0000AA; }
.ansi35 { color: #E850A8; }
.ansi36 { color: #00AAAA; }
.ansi37 { color: #F5F1DE; }
.ansi40 { background-color: #000000; }
.ansi41 { background-color: #ff5850; }
.ansi42 { background-color: #60D66F; }
.ansi43 { background-color: #AA5500; }
.ansi44 { background-color: #0000AA; }
.ansi45 { background-color: #E850A8; }
.ansi46 { background-color: #00AAAA; }
.ansi47 { background-color: #F5F1DE; }
.ansi30 { color: #161b1f; }
.ansi31 { color: #d9534f; }
.ansi32 { color: #5cb85c; }
.ansi33 { color: #f0ad4e; }
.ansi34 { color: #337ab7; }
.ansi35 { color: #e1539e; }
.ansi36 { color: #2dbaba; }
.ansi37 { color: #ffffff; }
.ansi40 { background-color: #161b1f; }
.ansi41 { background-color: #d9534f; }
.ansi42 { background-color: #5cb85c; }
.ansi43 { background-color: #f0ad4e; }
.ansi44 { background-color: #337ab7; }
.ansi45 { background-color: #e1539e; }
.ansi46 { background-color: #2dbaba; }
.ansi47 { background-color: #ffffff; }
body.ansi_back pre {
    font-family: Monaco, Menlo, Consolas, "Courier New", monospace;
    font-size: 12px;

@@ -47,4 +47,4 @@ div.ansi_back.ansi_dark {
<pre>{{ body }}</pre>
</body>
</html>
{% endif %}
{% endif %}

@@ -12,7 +12,7 @@ fields to create a new {{ model_verbose_name }} associated with this

{% block post_create %}{% endblock %}

{% if view.attach %}
{% if has_attach|default:False %}
{% if parent_key %}
# Remove {{ parent_model_verbose_name|title }} {{ model_verbose_name_plural|title }}:

@@ -34,7 +34,7 @@ existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}.

Make a POST request to this resource with `id` and `disassociate` fields to
remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }}
without deleting the {{ model_verbose_name }}.
{% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}.
{% endif %}
{% endif %}

@@ -5,7 +5,7 @@ Make a POST request to this resource to launch the system job template.
An extra parameter `extra_vars` is suggested in order to pass extra parameters
to the system job task.

For example on `cleanup_jobs`, `cleanup_deleted`, and `cleanup_activitystream`:
For example on `cleanup_jobs` and `cleanup_activitystream`:

    `{"days": 30}`

awx/api/templates/api/team_roles_list.md (new file, 12 lines)
@@ -0,0 +1,12 @@
# List Roles for this Team:

Make a GET request to this resource to retrieve a list of roles associated with the selected team.

{% include "api/_list_common.md" %}

# Associate Roles with this Team:

Make a POST request to this resource to add or remove a role from this team. The following fields may be modified:

* `id`: The Role ID to add to the team. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
awx/api/templates/api/user_roles_list.md (new file, 12 lines)
@@ -0,0 +1,12 @@
# List Roles for this User:

Make a GET request to this resource to retrieve a list of roles associated with the selected user.

{% include "api/_list_common.md" %}

# Associate Roles with this User:

Make a POST request to this resource to add or remove a role from this user. The following fields may be modified:

* `id`: The Role ID to add to the user. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
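For example, granting and then removing a role using the two fields listed above (a sketch with a hypothetical host, user ID, and role ID):

    import requests

    url = 'https://tower.example.com/api/v1/users/7/roles/'
    auth = ('admin', 'password')

    # Associate: grant the role with the given ID to the user.
    requests.post(url, auth=auth, json={'id': 123})

    # Disassociate: any value for 'disassociate' triggers removal.
    requests.post(url, auth=auth, json={'id': 123, 'disassociate': True})
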
@@ -1,20 +1,19 @@
TOWER SOFTWARE END USER LICENSE AGREEMENT
ANSIBLE TOWER BY RED HAT END USER LICENSE AGREEMENT

Unless otherwise agreed to, and executed in a definitive agreement, between
Ansible, Inc. (“Ansible”) and the individual or entity (“Customer”) signing or
electronically accepting these terms of use for the Tower Software (“EULA”),
all Tower Software, including any and all versions released or made available
by Ansible, shall be subject to the Ansible Software Subscription and Services
Agreement found at www.ansible.com/subscription-agreement (“Agreement”).
Ansible is not responsible for any additional obligations, conditions or
warranties agreed to between Customer and an authorized distributor, or
reseller, of the Tower Software. BY DOWNLOADING AND USING THE TOWER SOFTWARE,
OR BY CLICKING ON THE “YES” BUTTON OR OTHER BUTTON OR MECHANISM DESIGNED TO
ACKNOWLEDGE CONSENT TO THE TERMS OF AN ELECTRONIC COPY OF THIS EULA, THE
CUSTOMER HEREBY ACKNOWLEDGES THAT CUSTOMER HAS READ, UNDERSTOOD, AND AGREES TO
BE BOUND BY THE TERMS OF THIS EULA AND AGREEMENT, INCLUDING ALL TERMS
INCORPORATED HEREIN BY REFERENCE, AND THAT THIS EULA AND AGREEMENT IS
EQUIVALENT TO ANY WRITTEN NEGOTIATED AGREEMENT BETWEEN CUSTOMER AND ANSIBLE.
THIS EULA AND AGREEMENT IS ENFORCEABLE AGAINST ANY PERSON OR ENTITY THAT USES
OR AVAILS ITSELF OF THE TOWER SOFTWARE OR ANY PERSON OR ENTITY THAT USES THE OR
AVAILS ITSELF OF THE TOWER SOFTWARE ON ANOTHER PERSON’S OR ENTITY’S BEHALF.
This end user license agreement (“EULA”) governs the use of the Ansible Tower software and any related updates, upgrades, versions, appearance, structure and organization (the “Ansible Tower Software”), regardless of the delivery mechanism.

1. License Grant. Subject to the terms of this EULA, Red Hat, Inc. and its affiliates (“Red Hat”) grant to you (“You”) a non-transferable, non-exclusive, worldwide, non-sublicensable, limited, revocable license to use the Ansible Tower Software for the term of the associated Red Hat Software Subscription(s) and in a quantity equal to the number of Red Hat Software Subscriptions purchased from Red Hat for the Ansible Tower Software (“License”), each as set forth on the applicable Red Hat ordering document. You acquire only the right to use the Ansible Tower Software and do not acquire any rights of ownership. Red Hat reserves all rights to the Ansible Tower Software not expressly granted to You. This License grant pertains solely to Your use of the Ansible Tower Software and is not intended to limit Your rights under, or grant You rights that supersede, the license terms of any software packages which may be made available with the Ansible Tower Software that are subject to an open source software license.

2. Intellectual Property Rights. Title to the Ansible Tower Software and each component, copy and modification, including all derivative works whether made by Red Hat, You or on Red Hat's behalf, including those made at Your suggestion and all associated intellectual property rights, are and shall remain the sole and exclusive property of Red Hat and/or it licensors. The License does not authorize You (nor may You allow any third party, specifically non-employees of Yours) to: (a) copy, distribute, reproduce, use or allow third party access to the Ansible Tower Software except as expressly authorized hereunder; (b) decompile, disassemble, reverse engineer, translate, modify, convert or apply any procedure or process to the Ansible Tower Software in order to ascertain, derive, and/or appropriate for any reason or purpose, including the Ansible Tower Software source code or source listings or any trade secret information or process contained in the Ansible Tower Software (except as permitted under applicable law); (c) execute or incorporate other software (except for approved software as appears in the Ansible Tower Software documentation or specifically approved by Red Hat in writing) into Ansible Tower Software, or create a derivative work of any part of the Ansible Tower Software; (d) remove any trademarks, trade names or titles, copyrights legends or any other proprietary marking on the Ansible Tower Software; (e) disclose the results of any benchmarking of the Ansible Tower Software (whether or not obtained with Red Hat’s assistance) to any third party; (f) attempt to circumvent any user limits or other license, timing or use restrictions that are built into, defined or agreed upon, regarding the Ansible Tower Software. You are hereby notified that the Ansible Tower Software may contain time-out devices, counter devices, and/or other devices intended to ensure the limits of the License will not be exceeded (“Limiting Devices”). If the Ansible Tower Software contains Limiting Devices, Red Hat will provide You materials necessary to use the Ansible Tower Software to the extent permitted. You may not tamper with or otherwise take any action to defeat or circumvent a Limiting Device or other control measure, including but not limited to, resetting the unit amount or using false host identification number for the purpose of extending any term of the License.

3. Evaluation Licenses. Unless You have purchased Ansible Tower Software Subscriptions from Red Hat or an authorized reseller under the terms of a commercial agreement with Red Hat, all use of the Ansible Tower Software shall be limited to testing purposes and not for production use (“Evaluation”). Unless otherwise agreed by Red Hat, Evaluation of the Ansible Tower Software shall be limited to an evaluation environment and the Ansible Tower Software shall not be used to manage any systems or virtual machines on networks being used in the operation of Your business or any other non-evaluation purpose. Unless otherwise agreed by Red Hat, You shall limit all Evaluation use to a single 30 day evaluation period and shall not download or otherwise obtain additional copies of the Ansible Tower Software or license keys for Evaluation.

4. Limited Warranty. Except as specifically stated in this Section 4, to the maximum extent permitted under applicable law, the Ansible Tower Software and the components are provided and licensed “as is” without warranty of any kind, expressed or implied, including the implied warranties of merchantability, non-infringement or fitness for a particular purpose. Red Hat warrants solely to You that the media on which the Ansible Tower Software may be furnished will be free from defects in materials and manufacture under normal use for a period of thirty (30) days from the date of delivery to You. Red Hat does not warrant that the functions contained in the Ansible Tower Software will meet Your requirements or that the operation of the Ansible Tower Software will be entirely error free, appear precisely as described in the accompanying documentation, or comply with regulatory requirements.

5. Limitation of Remedies and Liability. To the maximum extent permitted by applicable law, Your exclusive remedy under this EULA is to return any defective media within thirty (30) days of delivery along with a copy of Your payment receipt and Red Hat, at its option, will replace it or refund the money paid by You for the media. To the maximum extent permitted under applicable law, neither Red Hat nor any Red Hat authorized distributor will be liable to You for any incidental or consequential damages, including lost profits or lost savings arising out of the use or inability to use the Ansible Tower Software or any component, even if Red Hat or the authorized distributor has been advised of the possibility of such damages. In no event shall Red Hat's liability or an authorized distributor’s liability exceed the amount that You paid to Red Hat for the Ansible Tower Software during the twelve months preceding the first event giving rise to liability.

6. Export Control. In accordance with the laws of the United States and other countries, You represent and warrant that You: (a) understand that the Ansible Tower Software and its components may be subject to export controls under the U.S. Commerce Department’s Export Administration Regulations (“EAR”); (b) are not located in any country listed in Country Group E:1 in Supplement No. 1 to part 740 of the EAR; (c) will not export, re-export, or transfer the Ansible Tower Software to any prohibited destination or to any end user who has been prohibited from participating in US export transactions by any federal agency of the US government; (d) will not use or transfer the Ansible Tower Software for use in connection with the design, development or production of nuclear, chemical or biological weapons, or rocket systems, space launch vehicles, or sounding rockets or unmanned air vehicle systems; (e) understand and agree that if you are in the United States and you export or transfer the Ansible Tower Software to eligible end users, you will, to the extent required by EAR Section 740.17 obtain a license for such export or transfer and will submit semi-annual reports to the Commerce Department’s Bureau of Industry and Security, which include the name and address (including country) of each transferee; and (f) understand that countries including the United States may restrict the import, use, or export of encryption products (which may include the Ansible Tower Software) and agree that you shall be solely responsible for compliance with any such import, use, or export restrictions.

7. General. If any provision of this EULA is held to be unenforceable, that shall not affect the enforceability of the remaining provisions. This agreement shall be governed by the laws of the State of New York and of the United States, without regard to any conflict of laws provisions. The rights and obligations of the parties to this EULA shall not be governed by the United Nations Convention on the International Sale of Goods.

Copyright © 2015 Red Hat, Inc. All rights reserved. "Red Hat" and “Ansible Tower” are registered trademarks of Red Hat, Inc. All other trademarks are the property of their respective owners.

@ -1,7 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .decorator_paginated import PaginatedDecoratorTests # noqa
|
||||
from .job_tasks import JobTasksTests # noqa
|
||||
@ -19,7 +19,14 @@ organization_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/inventories/$', 'organization_inventories_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'organization_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates/$', 'organization_notification_templates_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'organization_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'organization_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'organization_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'organization_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'organization_access_list'),
)

user_urls = patterns('awx.api.views',
@ -30,26 +37,33 @@ user_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/admin_of_organizations/$', 'user_admin_of_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', 'user_projects_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'user_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/permissions/$', 'user_permissions_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', 'user_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'user_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'user_access_list'),

)

project_urls = patterns('awx.api.views',
    url(r'^$', 'project_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'project_detail'),
    url(r'^(?P<pk>[0-9]+)/playbooks/$', 'project_playbooks'),
    url(r'^(?P<pk>[0-9]+)/organizations/$', 'project_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'project_teams_list'),
    url(r'^(?P<pk>[0-9]+)/update/$', 'project_update_view'),
    url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'project_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'project_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'project_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'project_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'project_access_list'),
)

project_update_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
)

team_urls = patterns('awx.api.views',
@ -58,8 +72,10 @@ team_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/projects/$', 'team_projects_list'),
    url(r'^(?P<pk>[0-9]+)/users/$', 'team_users_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'team_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/permissions/$', 'team_permissions_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', 'team_roles_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'team_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'team_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'team_access_list'),
)

inventory_urls = patterns('awx.api.views',
@ -73,8 +89,11 @@ inventory_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/tree/$', 'inventory_tree_view'),
    url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'inventory_inventory_sources_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/job_templates/$', 'inventory_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/scan_job_templates/$', 'inventory_scan_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
    #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'inventory_single_fact_view'),
)

@ -91,8 +110,8 @@ host_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
    #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
)

group_urls = patterns('awx.api.views',
@ -120,39 +139,59 @@ inventory_source_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'inventory_source_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'inventory_source_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'inventory_source_notification_templates_success_list'),
)

inventory_update_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
)

inventory_script_urls = patterns('awx.api.views',
    url(r'^$', 'inventory_script_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_script_detail'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_script_object_roles_list'),
)

credential_urls = patterns('awx.api.views',
    url(r'^$', 'credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'credential_detail'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'credential_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'credential_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/owner_users/$', 'credential_owner_users_list'),
    url(r'^(?P<pk>[0-9]+)/owner_teams/$', 'credential_owner_teams_list'),
    # See also credentials resources on users/teams.
)

permission_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/$', 'permission_detail'),
role_urls = patterns('awx.api.views',
    url(r'^$', 'role_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'role_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', 'role_users_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'role_teams_list'),
    url(r'^(?P<pk>[0-9]+)/parents/$', 'role_parents_list'),
    url(r'^(?P<pk>[0-9]+)/children/$', 'role_children_list'),
)

job_template_urls = patterns('awx.api.views',
    url(r'^$', 'job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/callback/$', 'job_template_callback'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'job_template_label_list'),
)

job_urls = patterns('awx.api.views',
@ -167,6 +206,8 @@ job_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/job_tasks/$', 'job_job_tasks_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'job_label_list'),
)

job_host_summary_urls = patterns('awx.api.views',
@ -201,12 +242,33 @@ system_job_template_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/launch/$', 'system_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'system_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'system_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'system_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'system_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'system_job_template_notification_templates_success_list'),
)

system_job_urls = patterns('awx.api.views',
    url(r'^$', 'system_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'system_job_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'),
)

notification_template_urls = patterns('awx.api.views',
    url(r'^$', 'notification_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'),
    url(r'^(?P<pk>[0-9]+)/test/$', 'notification_template_test'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'notification_template_notification_list'),
)

notification_urls = patterns('awx.api.views',
    url(r'^$', 'notification_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
)

label_urls = patterns('awx.api.views',
    url(r'^$', 'label_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'label_detail'),
)

schedule_urls = patterns('awx.api.views',
@ -220,6 +282,10 @@ activity_stream_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/$', 'activity_stream_detail'),
)

settings_urls = patterns('awx.api.views',
    url(r'^$', 'settings_list'),
    url(r'^reset/$', 'settings_reset'))

v1_urls = patterns('awx.api.views',
    url(r'^$', 'api_v1_root_view'),
    url(r'^ping/$', 'api_v1_ping_view'),
@ -228,9 +294,10 @@ v1_urls = patterns('awx.api.views',
    url(r'^authtoken/$', 'auth_token_view'),
    url(r'^me/$', 'user_me_list'),
    url(r'^dashboard/$', 'dashboard_view'),
    url(r'^dashboard/graphs/jobs/$', 'dashboard_jobs_graph_view'),
    url(r'^dashboard/graphs/inventory/$', 'dashboard_inventory_graph_view'),
    url(r'^schedules/', include(schedule_urls)),
    url(r'^dashboard/graphs/jobs/$','dashboard_jobs_graph_view'),
    # TODO: Uncomment after 3.0 when we bring database settings endpoints back
    # url(r'^settings/', include(settings_urls)),
    url(r'^schedules/', include(schedule_urls)),
    url(r'^organizations/', include(organization_urls)),
    url(r'^users/', include(user_urls)),
    url(r'^projects/', include(project_urls)),
@ -243,7 +310,7 @@ v1_urls = patterns('awx.api.views',
    url(r'^inventory_updates/', include(inventory_update_urls)),
    url(r'^inventory_scripts/', include(inventory_script_urls)),
    url(r'^credentials/', include(credential_urls)),
    url(r'^permissions/', include(permission_urls)),
    url(r'^roles/', include(role_urls)),
    url(r'^job_templates/', include(job_template_urls)),
    url(r'^jobs/', include(job_urls)),
    url(r'^job_host_summaries/', include(job_host_summary_urls)),
@ -252,7 +319,10 @@ v1_urls = patterns('awx.api.views',
    url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
    url(r'^system_job_templates/', include(system_job_template_urls)),
    url(r'^system_jobs/', include(system_job_urls)),
    url(r'^unified_job_templates/$', 'unified_job_template_list'),
    url(r'^notification_templates/', include(notification_template_urls)),
    url(r'^notifications/', include(notification_urls)),
    url(r'^labels/', include(label_urls)),
    url(r'^unified_job_templates/$','unified_job_template_list'),
    url(r'^unified_jobs/$', 'unified_job_list'),
    url(r'^activity_stream/', include(activity_stream_urls)),
)

@ -1,7 +1,7 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

from ordereddict import OrderedDict
from collections import OrderedDict
import copy
import functools

@ -23,21 +23,22 @@ def paginated(method):
    def func(self, request, *args, **kwargs):
        # Manually spin up pagination.
        # How many results do we show?
        limit = api_settings.PAGINATE_BY
        if request.QUERY_PARAMS.get(api_settings.PAGINATE_BY_PARAM, False):
            limit = request.QUERY_PARAMS[api_settings.PAGINATE_BY_PARAM]
        if api_settings.MAX_PAGINATE_BY:
            limit = min(api_settings.MAX_PAGINATE_BY, limit)
        paginator_class = api_settings.DEFAULT_PAGINATION_CLASS
        limit = paginator_class.page_size
        if request.query_params.get(paginator_class.page_size_query_param, False):
            limit = request.query_params[paginator_class.page_size_query_param]
        if paginator_class.max_page_size:
            limit = min(paginator_class.max_page_size, limit)
        limit = int(limit)

        # Get the order parameter if it's given
        if request.QUERY_PARAMS.get("ordering", False):
            ordering = request.QUERY_PARAMS["ordering"]
        if request.query_params.get("ordering", False):
            ordering = request.query_params["ordering"]
        else:
            ordering = None

        # What page are we on?
        page = int(request.QUERY_PARAMS.get('page', 1))
        page = int(request.query_params.get('page', 1))
        offset = (page - 1) * limit

        # Add the limit, offset, page, and order variables to the keyword arguments

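The hunk above migrates the decorator from the old `request.QUERY_PARAMS`/`PAGINATE_BY` settings to the newer `request.query_params`/`DEFAULT_PAGINATION_CLASS` API. As a minimal, standalone sketch of the same limit/offset arithmetic: the paginator attributes (`page_size`, `page_size_query_param`, `max_page_size`) are stubbed out here rather than taken from a real REST framework settings object, so the names below are illustrative only.

    # Hypothetical stand-in for a paginator class; only the three
    # attributes the decorator reads are modeled.
    class StubPaginator(object):
        page_size = 25
        page_size_query_param = 'page_size'
        max_page_size = 200

    def resolve_page_window(query_params, paginator_class=StubPaginator):
        """Return (limit, offset) using the same rules as the hunk above."""
        limit = paginator_class.page_size
        if query_params.get(paginator_class.page_size_query_param):
            limit = query_params[paginator_class.page_size_query_param]
        if paginator_class.max_page_size:
            limit = min(paginator_class.max_page_size, int(limit))
        limit = int(limit)
        page = int(query_params.get('page', 1))
        offset = (page - 1) * limit
        return limit, offset

    # Example: ?page_size=500&page=3 clamps to 200 items at offset 400.
    print(resolve_page_window({'page_size': '500', 'page': '3'}))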
awx/api/views.py: 1839 lines changed (file diff suppressed because it is too large)
@ -35,7 +35,7 @@ def _get_db_monkeypatched(cls):
                     password=settings.MONGO_PASSWORD,
                     tz_aware=settings.USE_TZ)
        register_key_transform(get_db())
    except ConnectionError:
    except (ConnectionError, AttributeError):
        logger.info('Failed to establish connection to MongoDB')
    return get_db(cls._meta.get("db_alias", "default"))

@ -1,8 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from __future__ import absolute_import

from .models import * # noqa
from .utils import * # noqa
from .base import * # noqa
@ -1,223 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
from __future__ import absolute_import
from django.utils.timezone import now

# Django
from django.conf import settings
import django

# MongoEngine
from mongoengine.connection import get_db, ConnectionError

# AWX
from awx.fact.models.fact import * # noqa

TEST_FACT_ANSIBLE = {
    "ansible_swapfree_mb" : 4092,
    "ansible_default_ipv6" : {

    },
    "ansible_distribution_release" : "trusty",
    "ansible_system_vendor" : "innotek GmbH",
    "ansible_os_family" : "Debian",
    "ansible_all_ipv4_addresses" : [
        "192.168.1.145"
    ],
    "ansible_lsb" : {
        "release" : "14.04",
        "major_release" : "14",
        "codename" : "trusty",
        "id" : "Ubuntu",
        "description" : "Ubuntu 14.04.2 LTS"
    },
}

TEST_FACT_PACKAGES = [
    {
        "name": "accountsservice",
        "architecture": "amd64",
        "source": "apt",
        "version": "0.6.35-0ubuntu7.1"
    },
    {
        "name": "acpid",
        "architecture": "amd64",
        "source": "apt",
        "version": "1:2.0.21-1ubuntu2"
    },
    {
        "name": "adduser",
        "architecture": "all",
        "source": "apt",
        "version": "3.113+nmu3ubuntu3"
    },
]

TEST_FACT_SERVICES = [
    {
        "source" : "upstart",
        "state" : "waiting",
        "name" : "ureadahead-other",
        "goal" : "stop"
    },
    {
        "source" : "upstart",
        "state" : "running",
        "name" : "apport",
        "goal" : "start"
    },
    {
        "source" : "upstart",
        "state" : "waiting",
        "name" : "console-setup",
        "goal" : "stop"
    },
]


class MongoDBRequired(django.test.TestCase):
    def setUp(self):
        # Drop mongo database
        try:
            self.db = get_db()
            self.db.connection.drop_database(settings.MONGO_DB)
        except ConnectionError:
            self.skipTest('MongoDB connection failed')

class BaseFactTestMixin(MongoDBRequired):
    pass

class BaseFactTest(BaseFactTestMixin, MongoDBRequired):
    pass

# TODO: for now, we relate all hosts to a single inventory
class FactScanBuilder(object):

    def __init__(self):
        self.facts_data = {}
        self.hostname_data = []
        self.inventory_id = 1

        self.host_objs = []
        self.fact_objs = []
        self.version_objs = []
        self.timestamps = []

        self.epoch = now().replace(year=2015, microsecond=0)

    def set_epoch(self, epoch):
        self.epoch = epoch

    def add_fact(self, module, facts):
        self.facts_data[module] = facts

    def add_hostname(self, hostname):
        self.hostname_data.append(hostname)

    def build(self, scan_count, host_count):
        if len(self.facts_data) == 0:
            raise RuntimeError("No fact data to populate scans. Call add_fact() first.")
        if (len(self.hostname_data) > 0 and len(self.hostname_data) != host_count):
            raise RuntimeError("Registered number of hostnames %d does not match host_count %d" % (len(self.hostname_data), host_count))

        if len(self.hostname_data) == 0:
            self.hostname_data = ['hostname_%s' % i for i in range(0, host_count)]

        self.host_objs = [FactHost(hostname=hostname, inventory_id=self.inventory_id).save() for hostname in self.hostname_data]

        for i in range(0, scan_count):
            scan = {}
            scan_version = {}
            timestamp = self.epoch.replace(year=self.epoch.year - i, microsecond=0)
            for module in self.facts_data:
                fact_objs = []
                version_objs = []
                for host in self.host_objs:
                    (fact_obj, version_obj) = Fact.add_fact(timestamp=timestamp,
                                                            host=host,
                                                            module=module,
                                                            fact=self.facts_data[module])
                    fact_objs.append(fact_obj)
                    version_objs.append(version_obj)
                scan[module] = fact_objs
                scan_version[module] = version_objs
            self.fact_objs.append(scan)
            self.version_objs.append(scan_version)
            self.timestamps.append(timestamp)


    def get_scan(self, index, module=None):
        res = None
        res = self.fact_objs[index]
        if module:
            res = res[module]
        return res

    def get_scans(self, index_start=None, index_end=None):
        if index_start is None:
            index_start = 0
        if index_end is None:
            index_end = len(self.fact_objs)
        return self.fact_objs[index_start:index_end]

    def get_scan_version(self, index, module=None):
        res = None
        res = self.version_objs[index]
        if module:
            res = res[module]
        return res

    def get_scan_versions(self, index_start=None, index_end=None):
        if index_start is None:
            index_start = 0
        if index_end is None:
            index_end = len(self.version_objs)
        return self.version_objs[index_start:index_end]

    def get_hostname(self, index):
        return self.host_objs[index].hostname

    def get_hostnames(self, index_start=None, index_end=None):
        if index_start is None:
            index_start = 0
        if index_end is None:
            index_end = len(self.host_objs)

        return [self.host_objs[i].hostname for i in range(index_start, index_end)]

    def get_inventory_id(self):
        return self.inventory_id

    def set_inventory_id(self, inventory_id):
        self.inventory_id = inventory_id

    def get_host(self, index):
        return self.host_objs[index]

    def get_hosts(self, index_start=None, index_end=None):
        if index_start is None:
            index_start = 0
        if index_end is None:
            index_end = len(self.host_objs)

        return self.host_objs[index_start:index_end]

    def get_scan_count(self):
        return len(self.fact_objs)

    def get_host_count(self):
        return len(self.host_objs)

    def get_timestamp(self, index):
        return self.timestamps[index]

    def get_timestamps(self, index_start=None, index_end=None):
        if not index_start:
            index_start = 0
        if not index_end:
            index_end = len(self.timestamps)
        return self.timestamps[index_start:index_end]

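For reference, the deleted FactScanBuilder above was driven roughly like this in the fact tests (a sketch based on the setUp bodies that appear later in this diff; it assumes a reachable MongoDB, as BaseFactTest requires):

    # Build 3 scans across 2 hosts, then read parts of the fixture back.
    builder = FactScanBuilder()
    builder.add_fact('packages', TEST_FACT_PACKAGES)
    builder.build(scan_count=3, host_count=2)

    first_scan = builder.get_scan(0, module='packages')  # scan at the epoch
    hostnames = builder.get_hostnames()                  # ['hostname_0', 'hostname_1']
    when = builder.get_timestamp(0)                      # timestamp of scan 0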
File diff suppressed because it is too large
@ -1,6 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from __future__ import absolute_import

from .fact import * # noqa
@ -1,9 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from __future__ import absolute_import

from .fact_simple import * # noqa
from .fact_transform_pymongo import * # noqa
from .fact_transform import * # noqa
from .fact_get_single_facts import * # noqa
@ -1,96 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
from __future__ import absolute_import

# Django

# AWX
from awx.fact.models.fact import * # noqa
from awx.fact.tests.base import BaseFactTest, FactScanBuilder, TEST_FACT_PACKAGES

__all__ = ['FactGetSingleFactsTest', 'FactGetSingleFactsMultipleScansTest',]

class FactGetSingleFactsTest(BaseFactTest):
    def setUp(self):
        super(FactGetSingleFactsTest, self).setUp()
        self.builder = FactScanBuilder()
        self.builder.add_fact('packages', TEST_FACT_PACKAGES)
        self.builder.add_fact('nested', TEST_FACT_PACKAGES)
        self.builder.build(scan_count=1, host_count=20)

    def check_query_results(self, facts_known, facts):
        self.assertIsNotNone(facts)
        self.assertEqual(len(facts_known), len(facts), "More or less facts found than expected")
        # Ensure only 'acpid' is returned
        for fact in facts:
            self.assertEqual(len(fact.fact), 1)
            self.assertEqual(fact.fact[0]['name'], 'acpid')

        # Transpose facts to a dict with key id
        count = 0
        facts_dict = {}
        for fact in facts:
            count += 1
            facts_dict[fact.id] = fact
        self.assertEqual(count, len(facts_known))

        # For each fact that we put into the database on setup,
        # we should find that fact in the result set returned
        for fact_known in facts_known:
            key = fact_known.id
            self.assertIn(key, facts_dict)
            self.assertEqual(len(facts_dict[key].fact), 1)

    def check_query_results_nested(self, facts):
        self.assertIsNotNone(facts)
        for fact in facts:
            self.assertEqual(len(fact.fact), 1)
            self.assertEqual(fact.fact['nested'][0]['name'], 'acpid')

    def test_single_host(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(0, 1), 'name', 'acpid', self.builder.get_timestamp(0), 'packages')

        self.check_query_results(self.builder.get_scan(0, 'packages')[:1], facts)

    def test_all(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(), 'name', 'acpid', self.builder.get_timestamp(0), 'packages')

        self.check_query_results(self.builder.get_scan(0, 'packages'), facts)

    def test_subset_hosts(self):
        host_count = (self.builder.get_host_count() / 2)
        facts = Fact.get_single_facts(self.builder.get_hostnames(0, host_count), 'name', 'acpid', self.builder.get_timestamp(0), 'packages')

        self.check_query_results(self.builder.get_scan(0, 'packages')[:host_count], facts)

    def test_get_single_facts_nested(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(), 'nested.name', 'acpid', self.builder.get_timestamp(0), 'packages')

        self.check_query_results_nested(facts)

class FactGetSingleFactsMultipleScansTest(BaseFactTest):
    def setUp(self):
        super(FactGetSingleFactsMultipleScansTest, self).setUp()
        self.builder = FactScanBuilder()
        self.builder.add_fact('packages', TEST_FACT_PACKAGES)
        self.builder.build(scan_count=10, host_count=10)

    def test_1_host(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(0, 1), 'name', 'acpid', self.builder.get_timestamp(0), 'packages')
        self.assertEqual(len(facts), 1)
        self.assertEqual(facts[0], self.builder.get_scan(0, 'packages')[0])

    def test_multiple_hosts(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(0, 3), 'name', 'acpid', self.builder.get_timestamp(0), 'packages')
        self.assertEqual(len(facts), 3)
        for i, fact in enumerate(facts):
            self.assertEqual(fact, self.builder.get_scan(0, 'packages')[i])

    def test_middle_of_timeline(self):
        facts = Fact.get_single_facts(self.builder.get_hostnames(0, 3), 'name', 'acpid', self.builder.get_timestamp(4), 'packages')
        self.assertEqual(len(facts), 3)
        for i, fact in enumerate(facts):
            self.assertEqual(fact, self.builder.get_scan(4, 'packages')[i])

@ -1,127 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
from __future__ import absolute_import
import os
import json

# Django
from django.utils.timezone import now
from dateutil.relativedelta import relativedelta

# AWX
from awx.fact.models.fact import * # noqa
from awx.fact.tests.base import BaseFactTest, FactScanBuilder, TEST_FACT_PACKAGES

__all__ = ['FactHostTest', 'FactTest', 'FactGetHostVersionTest', 'FactGetHostTimelineTest']

# damn you python 2.6
def timedelta_total_seconds(timedelta):
    return (
        timedelta.microseconds + 0.0 +
        (timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6


class FactHostTest(BaseFactTest):
    def test_create_host(self):
        host = FactHost(hostname='hosty', inventory_id=1)
        host.save()

        host = FactHost.objects.get(hostname='hosty', inventory_id=1)
        self.assertIsNotNone(host, "Host added but not found")
        self.assertEqual('hosty', host.hostname, "Gotten record hostname does not match expected hostname")
        self.assertEqual(1, host.inventory_id, "Gotten record inventory_id does not match expected inventory_id")

    # Ensure an error is raised for .get() that doesn't match a record.
    def test_get_host_id_no_result(self):
        host = FactHost(hostname='hosty', inventory_id=1)
        host.save()

        self.assertRaises(FactHost.DoesNotExist, FactHost.objects.get, hostname='doesnotexist', inventory_id=1)

class FactTest(BaseFactTest):
    def setUp(self):
        super(FactTest, self).setUp()

    def test_add_fact(self):
        timestamp = now().replace(microsecond=0)
        host = FactHost(hostname="hosty", inventory_id=1).save()
        (f_obj, v_obj) = Fact.add_fact(host=host, timestamp=timestamp, module='packages', fact=TEST_FACT_PACKAGES)
        f = Fact.objects.get(id=f_obj.id)
        v = FactVersion.objects.get(id=v_obj.id)

        self.assertEqual(f.id, f_obj.id)
        self.assertEqual(f.module, 'packages')
        self.assertEqual(f.fact, TEST_FACT_PACKAGES)
        self.assertEqual(f.timestamp, timestamp)

        # host relationship created
        self.assertEqual(f.host.id, host.id)

        # version created and related
        self.assertEqual(v.id, v_obj.id)
        self.assertEqual(v.timestamp, timestamp)
        self.assertEqual(v.host.id, host.id)
        self.assertEqual(v.fact.id, f_obj.id)
        self.assertEqual(v.fact.module, 'packages')

    # Note: Take the failure of this with a grain of salt.
    # The test almost entirely depends on the specs of the system it runs on.
    def test_add_fact_performance_4mb_file(self):
        timestamp = now().replace(microsecond=0)
        host = FactHost(hostname="hosty", inventory_id=1).save()

        from awx.fact import tests
        with open('%s/data/file_scan.json' % os.path.dirname(os.path.realpath(tests.__file__))) as f:
            data = json.load(f)

        t1 = now()
        (f_obj, v_obj) = Fact.add_fact(host=host, timestamp=timestamp, module='packages', fact=data)
        t2 = now()
        diff = timedelta_total_seconds(t2 - t1)
        print("add_fact save time: %s (s)" % diff)
        # Note: 20 is really high. This should complete in < 2 seconds
        self.assertLessEqual(diff, 20)

        Fact.objects.get(id=f_obj.id)
        FactVersion.objects.get(id=v_obj.id)

class FactGetHostVersionTest(BaseFactTest):
    def setUp(self):
        super(FactGetHostVersionTest, self).setUp()
        self.builder = FactScanBuilder()
        self.builder.add_fact('packages', TEST_FACT_PACKAGES)
        self.builder.build(scan_count=2, host_count=1)

    def test_get_host_version_exact_timestamp(self):
        fact_known = self.builder.get_scan(0, 'packages')[0]
        fact = Fact.get_host_version(hostname=self.builder.get_hostname(0), inventory_id=self.builder.get_inventory_id(), timestamp=self.builder.get_timestamp(0), module='packages')
        self.assertIsNotNone(fact)
        self.assertEqual(fact_known, fact)

    def test_get_host_version_lte_timestamp(self):
        timestamp = self.builder.get_timestamp(0) + relativedelta(days=1)
        fact_known = self.builder.get_scan(0, 'packages')[0]
        fact = Fact.get_host_version(hostname=self.builder.get_hostname(0), inventory_id=self.builder.get_inventory_id(), timestamp=timestamp, module='packages')
        self.assertIsNotNone(fact)
        self.assertEqual(fact_known, fact)

    def test_get_host_version_none(self):
        timestamp = self.builder.get_timestamp(0) - relativedelta(years=20)
        fact = Fact.get_host_version(hostname=self.builder.get_hostname(0), inventory_id=self.builder.get_inventory_id(), timestamp=timestamp, module='packages')
        self.assertIsNone(fact)

class FactGetHostTimelineTest(BaseFactTest):
    def setUp(self):
        super(FactGetHostTimelineTest, self).setUp()
        self.builder = FactScanBuilder()
        self.builder.add_fact('packages', TEST_FACT_PACKAGES)
        self.builder.build(scan_count=20, host_count=1)

    def test_get_host_timeline_ok(self):
        timestamps = Fact.get_host_timeline(hostname=self.builder.get_hostname(0), inventory_id=self.builder.get_inventory_id(), module='packages')
        self.assertIsNotNone(timestamps)
        self.assertEqual(len(timestamps), self.builder.get_scan_count())
        for i in range(0, self.builder.get_scan_count()):
            self.assertEqual(timestamps[i], self.builder.get_timestamp(i))
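The add_fact flow exercised by FactTest above reduces to a few lines (a sketch using the MongoDB-backed Fact/FactHost models and TEST_FACT_PACKAGES fixture from this diff; it assumes a running MongoDB):

    from django.utils.timezone import now

    # Create the host document, store one scan, and fetch it back.
    host = FactHost(hostname='hosty', inventory_id=1).save()
    fact_obj, version_obj = Fact.add_fact(host=host,
                                          timestamp=now().replace(microsecond=0),
                                          module='packages',
                                          fact=TEST_FACT_PACKAGES)
    assert Fact.objects.get(id=fact_obj.id).module == 'packages'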
@ -1,120 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
from __future__ import absolute_import
from datetime import datetime

# Django
from django.conf import settings

# Pymongo
import pymongo

# AWX
from awx.fact.models.fact import * # noqa
from awx.fact.tests.base import BaseFactTest

__all__ = ['FactTransformTest', 'FactTransformUpdateTest',]

TEST_FACT_PACKAGES_WITH_DOTS = [
    {
        "name": "acpid3.4",
        "version": "1:2.0.21-1ubuntu2",
        "deeper.key": "some_value"
    },
    {
        "name": "adduser.2",
        "source": "apt",
        "version": "3.113+nmu3ubuntu3"
    },
    {
        "what.ever." : {
            "shallowish.key": "some_shallow_value"
        }
    }
]

TEST_FACT_PACKAGES_WITH_DOLLARS = [
    {
        "name": "acpid3$4",
        "version": "1:2.0.21-1ubuntu2",
        "deeper.key": "some_value"
    },
    {
        "name": "adduser$2",
        "source": "apt",
        "version": "3.113+nmu3ubuntu3"
    },
    {
        "what.ever." : {
            "shallowish.key": "some_shallow_value"
        }
    }
]
class FactTransformTest(BaseFactTest):
    def setUp(self):
        super(FactTransformTest, self).setUp()
        # TODO: get host settings from config
        self.client = pymongo.MongoClient('localhost', 27017)
        self.db2 = self.client[settings.MONGO_DB]

        self.timestamp = datetime.now().replace(microsecond=0)

    def setup_create_fact_dot(self):
        self.host = FactHost(hostname='hosty', inventory_id=1).save()
        self.f = Fact(timestamp=self.timestamp, module='packages', fact=TEST_FACT_PACKAGES_WITH_DOTS, host=self.host)
        self.f.save()

    def setup_create_fact_dollar(self):
        self.host = FactHost(hostname='hosty', inventory_id=1).save()
        self.f = Fact(timestamp=self.timestamp, module='packages', fact=TEST_FACT_PACKAGES_WITH_DOLLARS, host=self.host)
        self.f.save()

    def test_fact_with_dot_serialized(self):
        self.setup_create_fact_dot()

        q = {
            '_id': self.f.id
        }

        # Bypass mongoengine and pymongo transform to get record
        f_dict = self.db2['fact'].find_one(q)
        self.assertIn('what\uff0Eever\uff0E', f_dict['fact'][2])

    def test_fact_with_dot_serialized_pymongo(self):
        #self.setup_create_fact_dot()

        host = FactHost(hostname='hosty', inventory_id=1).save()
        f = self.db['fact'].insert({
            'hostname': 'hosty',
            'fact': TEST_FACT_PACKAGES_WITH_DOTS,
            'timestamp': self.timestamp,
            'host': host.id,
            'module': 'packages',
        })

        q = {
            '_id': f
        }
        # Bypass mongoengine and pymongo transform to get record
        f_dict = self.db2['fact'].find_one(q)
        self.assertIn('what\uff0Eever\uff0E', f_dict['fact'][2])

    def test_fact_with_dot_deserialized_pymongo(self):
        self.setup_create_fact_dot()

        q = {
            '_id': self.f.id
        }
        f_dict = self.db['fact'].find_one(q)
        self.assertIn('what.ever.', f_dict['fact'][2])

    def test_fact_with_dot_deserialized(self):
        self.setup_create_fact_dot()

        f = Fact.objects.get(id=self.f.id)
        self.assertIn('what.ever.', f.fact[2])

class FactTransformUpdateTest(BaseFactTest):
    pass
@ -1,96 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
from __future__ import absolute_import
from datetime import datetime

# Django
from django.conf import settings

# Pymongo
import pymongo

# AWX
from awx.fact.models.fact import * # noqa
from awx.fact.tests.base import BaseFactTest

__all__ = ['FactSerializePymongoTest', 'FactDeserializePymongoTest',]

class FactPymongoBaseTest(BaseFactTest):
    def setUp(self):
        super(FactPymongoBaseTest, self).setUp()
        # TODO: get host settings from config
        self.client = pymongo.MongoClient('localhost', 27017)
        self.db2 = self.client[settings.MONGO_DB]

    def _create_fact(self):
        fact = {}
        fact[self.k] = self.v
        q = {
            'hostname': 'blah'
        }
        h = self.db['fact_host'].insert(q)
        q = {
            'host': h,
            'module': 'blah',
            'timestamp': datetime.now(),
            'fact': fact
        }
        f = self.db['fact'].insert(q)
        return f

    def check_transform(self, id):
        raise RuntimeError("Must override")

    def create_dot_fact(self):
        self.k = 'this.is.a.key'
        self.v = 'this.is.a.value'

        self.k_uni = 'this\uff0Eis\uff0Ea\uff0Ekey'

        return self._create_fact()

    def create_dollar_fact(self):
        self.k = 'this$is$a$key'
        self.v = 'this$is$a$value'

        self.k_uni = 'this\uff04is\uff04a\uff04key'

        return self._create_fact()

class FactSerializePymongoTest(FactPymongoBaseTest):
    def check_transform(self, id):
        q = {
            '_id': id
        }
        f = self.db2.fact.find_one(q)
        self.assertIn(self.k_uni, f['fact'])
        self.assertEqual(f['fact'][self.k_uni], self.v)

    # Ensure '.' in keys is transformed to its unicode equivalent in the database
    def test_key_transform_dot(self):
        f = self.create_dot_fact()
        self.check_transform(f)

    # Ensure '$' in keys is transformed to its unicode equivalent in the database
    def test_key_transform_dollar(self):
        f = self.create_dollar_fact()
        self.check_transform(f)

class FactDeserializePymongoTest(FactPymongoBaseTest):
    def check_transform(self, id):
        q = {
            '_id': id
        }
        f = self.db.fact.find_one(q)
        self.assertIn(self.k, f['fact'])
        self.assertEqual(f['fact'][self.k], self.v)

    def test_key_transform_dot(self):
        f = self.create_dot_fact()
        self.check_transform(f)

    def test_key_transform_dollar(self):
        f = self.create_dollar_fact()
        self.check_transform(f)
@ -1,6 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from __future__ import absolute_import

from .dbtransform import * # noqa
@ -1,112 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# AWX
from awx.main.tests.base import BaseTest
from awx.fact.models.fact import * # noqa
from awx.fact.utils.dbtransform import KeyTransform

#__all__ = ['DBTransformTest', 'KeyTransformUnitTest']
__all__ = ['KeyTransformUnitTest']

class KeyTransformUnitTest(BaseTest):
    def setUp(self):
        super(KeyTransformUnitTest, self).setUp()
        self.key_transform = KeyTransform([('.', '\uff0E'), ('$', '\uff04')])

    def test_no_replace(self):
        value = {
            "a_key_with_a_dict" : {
                "key" : "value",
                "nested_key_with_dict": {
                    "nested_key_with_value" : "deep_value"
                }
            }
        }

        data = self.key_transform.transform_incoming(value, None)
        self.assertEqual(data, value)

        data = self.key_transform.transform_outgoing(value, None)
        self.assertEqual(data, value)

    def test_complex(self):
        value = {
            "a.key.with.a.dict" : {
                "key" : "value",
                "nested.key.with.dict": {
                    "nested.key.with.value" : "deep_value"
                }
            }
        }
        value_transformed = {
            "a\uff0Ekey\uff0Ewith\uff0Ea\uff0Edict" : {
                "key" : "value",
                "nested\uff0Ekey\uff0Ewith\uff0Edict": {
                    "nested\uff0Ekey\uff0Ewith\uff0Evalue" : "deep_value"
                }
            }
        }

        data = self.key_transform.transform_incoming(value, None)
        self.assertEqual(data, value_transformed)

        data = self.key_transform.transform_outgoing(value_transformed, None)
        self.assertEqual(data, value)

    def test_simple(self):
        value = {
            "a.key" : "value"
        }
        value_transformed = {
            "a\uff0Ekey" : "value"
        }

        data = self.key_transform.transform_incoming(value, None)
        self.assertEqual(data, value_transformed)

        data = self.key_transform.transform_outgoing(value_transformed, None)
        self.assertEqual(data, value)

    def test_nested_dict(self):
        value = {
            "a.key.with.a.dict" : {
                "nested.key." : "value"
            }
        }
        value_transformed = {
            "a\uff0Ekey\uff0Ewith\uff0Ea\uff0Edict" : {
                "nested\uff0Ekey\uff0E" : "value"
            }
        }

        data = self.key_transform.transform_incoming(value, None)
        self.assertEqual(data, value_transformed)

        data = self.key_transform.transform_outgoing(value_transformed, None)
        self.assertEqual(data, value)

    def test_array(self):
        value = {
            "a.key.with.an.array" : [
                {
                    "key.with.dot" : "value"
                }
            ]
        }
        value_transformed = {
            "a\uff0Ekey\uff0Ewith\uff0Ean\uff0Earray" : [
                {
                    "key\uff0Ewith\uff0Edot" : "value"
                }
            ]
        }
        data = self.key_transform.transform_incoming(value, None)
        self.assertEqual(data, value_transformed)

        data = self.key_transform.transform_outgoing(value_transformed, None)
        self.assertEqual(data, value)

'''
class DBTransformTest(BaseTest, MongoDBRequired):
'''
@ -1,28 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

from django.conf import settings
from mongoengine import connect
from mongoengine.connection import ConnectionError
from pymongo.errors import AutoReconnect

def test_mongo_connection():
    # Connect to Mongo
    try:
        # Sanity check: If we have intentionally invalid settings, then we
        # know we cannot connect.
        if settings.MONGO_HOST == NotImplemented:
            raise ConnectionError

        # Attempt to connect to the MongoDB database.
        db = connect(settings.MONGO_DB,
                     host=settings.MONGO_HOST,
                     port=int(settings.MONGO_PORT),
                     username=settings.MONGO_USERNAME,
                     password=settings.MONGO_PASSWORD,
                     tz_aware=settings.USE_TZ)
        db[settings.MONGO_DB].command('ping')
        return True
    except (ConnectionError, AutoReconnect):
        return False

@ -11,12 +11,12 @@ class KeyTransform(SONManipulator):

    def replace_key(self, key):
        for (replace, replacement) in self.replace:
            key = key.replace(replace, replacement)
            key = key.replace(replace, replacement)
        return key

    def revert_key(self, key):
        for (replacement, replace) in self.replace:
            key = key.replace(replace, replacement)
            key = key.replace(replace, replacement)
        return key

    def replace_incoming(self, obj):
@ -49,7 +49,10 @@ class KeyTransform(SONManipulator):
        return self.replace_incoming(son)

    def transform_outgoing(self, son, collection):
        if not collection or collection.name != 'fact':
            return son
        return self.replace_outgoing(son)

def register_key_transform(db):
    db.add_son_manipulator(KeyTransform([('.', '\uff0E'), ('$', '\uff04')]))
    #db.add_son_manipulator(KeyTransform([('.', '\uff0E'), ('$', '\uff04')]))
    pass

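The KeyTransform manipulator above (now effectively disabled in register_key_transform) implements the usual MongoDB workaround of mapping '.' and '$' in document keys to fullwidth Unicode look-alikes, since MongoDB reserves those characters in key names. A dependency-free sketch of the same idea:

    REPLACEMENTS = [('.', u'\uff0e'), ('$', u'\uff04')]

    def transform_keys(obj, table=REPLACEMENTS):
        """Recursively rewrite dict keys so they are safe to store in MongoDB."""
        if isinstance(obj, dict):
            new = {}
            for key, value in obj.items():
                for bad, safe in table:
                    key = key.replace(bad, safe)
                new[key] = transform_keys(value, table)
            return new
        if isinstance(obj, list):
            return [transform_keys(item, table) for item in obj]
        return obj

    # {'a.key': 1} becomes {u'a\uff0ekey': 1}; values are left untouched.
    # Reversing the tuples in REPLACEMENTS gives the outgoing (revert) direction.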
awx/main/access.py: 1519 lines changed (file diff suppressed because it is too large)
awx/main/conf.py: new file, 50 lines
@ -0,0 +1,50 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import logging

from django.conf import settings as django_settings
from django.db.utils import ProgrammingError
from django.db import OperationalError
from awx.main.models.configuration import TowerSettings

logger = logging.getLogger('awx.main.conf')

class TowerConfiguration(object):

    # TODO: Caching so we don't have to hit the database every time for settings
    def __getattr__(self, key):
        settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
        if key not in settings_manifest:
            raise AttributeError("Tower Setting with key '{0}' is not defined in the manifest".format(key))
        default_value = settings_manifest[key]['default']
        ts = TowerSettings.objects.filter(key=key)
        try:
            if not ts.exists():
                try:
                    val_actual = getattr(django_settings, key)
                except AttributeError:
                    val_actual = default_value
                return val_actual
            return ts[0].value_converted
        except (ProgrammingError, OperationalError), e:
            # Database is not available yet, usually during migrations, so use the default
            logger.debug("Database settings not available yet, using defaults ({0})".format(e))
            return default_value

    def __setattr__(self, key, value):
        settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
        if key not in settings_manifest:
            raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
        settings_entry = settings_manifest[key]
        try:
            settings_actual = TowerSettings.objects.get(key=key)
        except TowerSettings.DoesNotExist:
            settings_actual = TowerSettings(key=key,
                                            description=settings_entry['description'],
                                            category=settings_entry['category'],
                                            value_type=settings_entry['type'])
        settings_actual.value_converted = value
        settings_actual.save()

tower_settings = TowerConfiguration()

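In use, the TowerConfiguration object added above behaves like a settings proxy: reads fall back from the database row to the Django settings attribute to the manifest default, and writes create or update the backing TowerSettings row. A usage sketch (the setting name is hypothetical; real keys are whatever TOWER_SETTINGS_MANIFEST defines, each carrying default/description/category/type entries as the code reads them):

    from awx.main.conf import tower_settings

    # Read: DB row if present, else django_settings.<KEY>, else manifest default.
    value = tower_settings.SOME_MANIFEST_KEY  # hypothetical key name

    # Write: creates or updates the TowerSettings row for that key.
    tower_settings.SOME_MANIFEST_KEY = 42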
@ -1,5 +1,5 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

CLOUD_PROVIDERS = ('azure', 'ec2', 'gce', 'rax', 'vmware', 'openstack')
CLOUD_PROVIDERS = ('azure', 'azure_rm', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'satellite6', 'cloudforms')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom',)

@ -1,14 +1,31 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import json

# Django
from django.db.models.signals import (
    post_save,
    post_delete,
)
from django.db.models.signals import m2m_changed
from django.db import models
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.db.models.fields.related import (
    add_lazy_relation,
    SingleRelatedObjectDescriptor,
    ReverseSingleRelatedObjectDescriptor,
    ManyRelatedObjectsDescriptor,
    ReverseManyRelatedObjectsDescriptor,
)
from django.utils.encoding import smart_text

# South
from south.modelsinspector import add_introspection_rules
# AWX
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.utils import get_current_apps


__all__ = ['AutoOneToOneField', 'ImplicitRoleField']

__all__ = ['AutoOneToOneField']

# Based on AutoOneToOneField from django-annoying:
# https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py
@ -20,8 +37,8 @@ class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
        try:
            return super(AutoSingleRelatedObjectDescriptor,
                         self).__get__(instance, instance_type)
        except self.related.model.DoesNotExist:
            obj = self.related.model(**{self.related.field.name: instance})
        except self.related.related_model.DoesNotExist:
            obj = self.related.related_model(**{self.related.field.name: instance})
            if self.related.field.rel.parent_link:
                raise NotImplementedError('not supported with polymorphic!')
            for f in instance._meta.local_fields:
@ -36,5 +53,200 @@ class AutoOneToOneField(models.OneToOneField):
        setattr(cls, related.get_accessor_name(),
                AutoSingleRelatedObjectDescriptor(related))

add_introspection_rules([([AutoOneToOneField], [], {})],
                        [r'^awx\.main\.fields\.AutoOneToOneField'])


def resolve_role_field(obj, field):
    ret = []

    field_components = field.split('.', 1)
    if hasattr(obj, field_components[0]):
        obj = getattr(obj, field_components[0])
    else:
        return []

    if obj is None:
        return []

    if len(field_components) == 1:
        Role_ = get_current_apps().get_model('main', 'Role')
        if type(obj) is not Role_:
            raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj))))
        ret.append(obj.id)
    else:
        if type(obj) is ManyRelatedObjectsDescriptor:
            for o in obj.all():
                ret += resolve_role_field(o, field_components[1])
        else:
            ret += resolve_role_field(obj, field_components[1])

    return ret


class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
    pass


class ImplicitRoleField(models.ForeignKey):
    """Implicitly creates a role entry for a resource"""

    def __init__(self, parent_role=None, *args, **kwargs):
        self.parent_role = parent_role

        kwargs.setdefault('to', 'Role')
        kwargs.setdefault('related_name', '+')
        kwargs.setdefault('null', 'True')
        super(ImplicitRoleField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(ImplicitRoleField, self).deconstruct()
        kwargs['parent_role'] = self.parent_role
        return name, path, args, kwargs

    def contribute_to_class(self, cls, name):
        super(ImplicitRoleField, self).contribute_to_class(cls, name)
        setattr(cls, self.name, ImplicitRoleDescriptor(self))

        if not hasattr(cls, '__implicit_role_fields'):
            setattr(cls, '__implicit_role_fields', [])
        getattr(cls, '__implicit_role_fields').append(self)

        post_save.connect(self._post_save, cls, True, dispatch_uid='implicit-role-post-save')
        post_delete.connect(self._post_delete, cls, True, dispatch_uid='implicit-role-post-delete')
        add_lazy_relation(cls, self, "self", self.bind_m2m_changed)

    def bind_m2m_changed(self, _self, _role_class, cls):
        if not self.parent_role:
            return

        field_names = self.parent_role
        if type(field_names) is not list:
            field_names = [field_names]

        for field_name in field_names:
            # Handle the OR syntax for role parents
            if type(field_name) == tuple:
                continue

            if field_name.startswith('singleton:'):
                continue

            field_name, sep, field_attr = field_name.partition('.')
            field = getattr(cls, field_name)

            if type(field) is ReverseManyRelatedObjectsDescriptor or \
               type(field) is ManyRelatedObjectsDescriptor:

                if '.' in field_attr:
                    raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')

                if type(field) is ReverseManyRelatedObjectsDescriptor:
                    sender = field.through
                else:
                    sender = field.related.through

                reverse = type(field) is ManyRelatedObjectsDescriptor
                m2m_changed.connect(self.m2m_update(field_attr, reverse), sender, weak=False)

    def m2m_update(self, field_attr, _reverse):
        def _m2m_update(instance, action, model, pk_set, reverse, **kwargs):
            if action == 'post_add' or action == 'pre_remove':
                if _reverse:
                    reverse = not reverse

                if reverse:
                    for pk in pk_set:
                        obj = model.objects.get(pk=pk)
                        if action == 'post_add':
                            getattr(instance, field_attr).children.add(getattr(obj, self.name))
                        if action == 'pre_remove':
                            getattr(instance, field_attr).children.remove(getattr(obj, self.name))

                else:
                    for pk in pk_set:
                        obj = model.objects.get(pk=pk)
                        if action == 'post_add':
                            getattr(instance, self.name).parents.add(getattr(obj, field_attr))
                        if action == 'pre_remove':
                            getattr(instance, self.name).parents.remove(getattr(obj, field_attr))
        return _m2m_update


    def _post_save(self, instance, created, *args, **kwargs):
        Role_ = get_current_apps().get_model('main', 'Role')
        ContentType_ = get_current_apps().get_model('contenttypes', 'ContentType')
        ct_id = ContentType_.objects.get_for_model(instance).id
        with batch_role_ancestor_rebuilding():
            # Create any missing role objects
            missing_roles = []
            for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
                cur_role = getattr(instance, implicit_role_field.name, None)
                if cur_role is None:
                    missing_roles.append(
                        Role_(
                            role_field=implicit_role_field.name,
                            content_type_id=ct_id,
                            object_id=instance.id
                        )
                    )
            if len(missing_roles) > 0:
                Role_.objects.bulk_create(missing_roles)
                updates = {}
                role_ids = []
                for role in Role_.objects.filter(content_type_id=ct_id, object_id=instance.id):
                    setattr(instance, role.role_field, role)
                    updates[role.role_field] = role.id
                    role_ids.append(role.id)
                type(instance).objects.filter(pk=instance.pk).update(**updates)
                Role.rebuild_role_ancestor_list(role_ids, [])

            # Update parentage if necessary
            for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
                cur_role = getattr(instance, implicit_role_field.name)
                original_parents = set(json.loads(cur_role.implicit_parents))
                new_parents = implicit_role_field._resolve_parent_roles(instance)
                cur_role.parents.remove(*list(original_parents - new_parents))
                cur_role.parents.add(*list(new_parents - original_parents))
                new_parents_list = list(new_parents)
                new_parents_list.sort()
                new_parents_json = json.dumps(new_parents_list)
                if cur_role.implicit_parents != new_parents_json:
                    cur_role.implicit_parents = new_parents_json
                    cur_role.save()


    def _resolve_parent_roles(self, instance):
        if not self.parent_role:
            return set()

        paths = self.parent_role if type(self.parent_role) is list else [self.parent_role]
        parent_roles = set()

        for path in paths:
            if path.startswith("singleton:"):
                singleton_name = path[10:]
                Role_ = get_current_apps().get_model('main', 'Role')
                qs = Role_.objects.filter(singleton_name=singleton_name)
                if qs.count() >= 1:
                    role = qs[0]
                else:
                    role = Role_.objects.create(singleton_name=singleton_name, role_field=singleton_name)
                parents = [role.id]
            else:
                parents = resolve_role_field(instance, path)

            for parent in parents:
                parent_roles.add(parent)
        return parent_roles

    def _post_delete(self, instance, *args, **kwargs):
        role_ids = []
        for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
            role_ids.append(getattr(instance, implicit_role_field.name + '_id'))

        Role_ = get_current_apps().get_model('main', 'Role')
        child_ids = [x for x in Role_.parents.through.objects.filter(to_role_id__in=role_ids).distinct().values_list('from_role_id', flat=True)]
        Role_.objects.filter(id__in=role_ids).delete()
        Role.rebuild_role_ancestor_list([], child_ids)

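A sketch of how a model would declare one of these implicit roles, modeled on the parent_role paths the field resolves above. The model, field, and singleton names here are illustrative assumptions, not taken from this diff:

    from django.db import models
    from awx.main.fields import ImplicitRoleField

    class Widget(models.Model):  # hypothetical resource model
        name = models.CharField(max_length=512)
        admin_role = ImplicitRoleField(
            # parent_role may be a dotted path to another role
            # (e.g. 'organization.admin_role') or a 'singleton:<name>'
            # system-wide role, per the resolution logic above.
            parent_role='singleton:system_administrator',
        )

On save, _post_save creates the missing Role row, points admin_role at it, and wires the resolved parents into the role ancestry graph; on delete, _post_delete removes the role and rebuilds ancestors for its children.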
@ -1,131 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import datetime
from optparse import make_option

# Django
from django.core.management.base import BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.utils.dateparse import parse_datetime
from django.utils.timezone import now, is_aware, make_aware
from django.core.management.base import CommandError

# AWX
from awx.main.models import * # noqa

class Command(BaseCommand):
    '''
    Management command to age deleted items.
    '''

    help = 'Age deleted items in the database.'

    option_list = BaseCommand.option_list + (
        make_option('--days', dest='days', type='int', default=90, metavar='N',
                    help='Age deleted items N days (90 if not specified)'),
        make_option('--id', dest='id', type='int', default=None,
                    help='Object primary key'),
        make_option('--type', dest='type', default=None,
                    help='Model to limit aging to'),
    )

    def get_models(self, model):
        if not model._meta.abstract:
            yield model
        for sub in model.__subclasses__():
            for submodel in self.get_models(sub):
                yield submodel

    def cleanup_model(self, model, id=None):
        '''
        Presume the '_deleted_' string to be in the 'name' field unless considering the User model.
        When considering the User model, presume the '_d_' string to be in the 'username' field.
        '''
        name_field = 'name'
        name_prefix = '_deleted_'
        n_aged_items = 0
        if model is User:
            name_field = 'username'
            name_prefix = '_d_'
        active_field = None
        for field in model._meta.fields:
            if field.name in ('is_active', 'active'):
                active_field = field.name
        if not active_field:
            #print("Skipping model %s, no active field" % model)
            print("Returning %s" % n_aged_items)
            return n_aged_items

        kv = {
            active_field: False,
        }
        if id:
            kv['pk'] = id
        else:
            kv['%s__startswith' % name_field] = name_prefix

        qs = model.objects.filter(**kv)
        #print("Aging model %s" % model)
        for instance in qs:
            name = getattr(instance, name_field)
            name_pieces = name.split('_')
            if not name_pieces or len(name_pieces) < 3:
                print("Unexpected deleted model name format %s" % name)
                return n_aged_items

            if len(name_pieces) <= 3:
                name_append = ''
            else:
                name_append = '_' + name_pieces[3]

            dt = parse_datetime(name_pieces[2])
            if not is_aware(dt):
                dt = make_aware(dt, self.cutoff.tzinfo)
            if not dt:
                print('unable to find deleted timestamp in %s field' % name_field)
            else:
                aged_date = dt - datetime.timedelta(days=self.days)
                if model is User:
                    aged_ts_append = aged_date.strftime('%Y-%m-%dT%H:%M:%S.%f')
                else:
                    aged_ts_append = aged_date.isoformat() + name_append
                setattr(instance, name_field, name_prefix + aged_ts_append)
                instance.save()
                #print("Aged %s" % getattr(instance, name_field))
                n_aged_items += 1

        return n_aged_items

    @transaction.atomic
    def handle(self, *args, **options):
        self.days = int(options.get('days', 90))
        self.id = options.get('id', None)
        self.type = options.get('type', None)
        self.cutoff = now() - datetime.timedelta(days=self.days)

        if self.id and not self.type:
            raise CommandError('Specifying id requires --type')

        n_aged_items = 0
        if not self.type:
            n_aged_items += self.cleanup_model(User)
            for model in self.get_models(PrimordialModel):
                n_aged_items += self.cleanup_model(model)
        else:
            model_found = None
            if self.type == User.__name__:
                model_found = User
            else:
                for model in self.get_models(PrimordialModel):
                    if model.__name__ == self.type:
                        model_found = model
                        break
            if not model_found:
                raise RuntimeError("Invalid type %s" % self.type)
            n_aged_items += self.cleanup_model(model_found, self.id)

        print("Aged %d items" % n_aged_items)

@ -1,127 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import datetime
import logging
from optparse import make_option

# Django
from django.core.management.base import BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.utils.dateparse import parse_datetime
from django.utils.timezone import now, is_aware, make_aware

# AWX
from awx.main.models import * # noqa

class Command(BaseCommand):
    '''
    Management command to cleanup deleted items.
    '''

    help = 'Cleanup deleted items from the database.'
    args = '[<appname>, <appname.ModelName>, ...]'

    option_list = BaseCommand.option_list + (
        make_option('--days', dest='days', type='int', default=90, metavar='N',
                    help='Remove items deleted more than N days ago'),
        make_option('--dry-run', dest='dry_run', action='store_true',
                    default=False, help='Dry run mode (show items that would '
                                        'be removed)'),
    )

    def get_models(self, model):
        if not model._meta.abstract:
            yield model
        for sub in model.__subclasses__():
            for submodel in self.get_models(sub):
                yield submodel

    def cleanup_model(self, model):

        '''
        Presume the '_deleted_' string to be in the 'name' field unless considering the User model.
        When considering the User model, presume the '_d_' string to be in the 'username' field.
        '''
        name_field = 'name'
        name_prefix = '_deleted_'
        if model is User:
            name_field = 'username'
            name_prefix = '_d_'
        active_field = None
        n_deleted_items = 0
        for field in model._meta.fields:
            if field.name in ('is_active', 'active'):
                active_field = field.name
        if not active_field:
            self.logger.warning('skipping model %s, no active field', model)
            return n_deleted_items
        qs = model.objects.filter(**{
            active_field: False,
            '%s__startswith' % name_field: name_prefix,
        })
        self.logger.debug('cleaning up model %s', model)
        pks_to_delete = set()
        for instance in qs.iterator():
            dt = parse_datetime(getattr(instance, name_field).split('_')[2])
            if not is_aware(dt):
                dt = make_aware(dt, self.cutoff.tzinfo)
            if not dt:
                self.logger.warning('unable to find deleted timestamp in %s '
                                    'field', name_field)
            elif dt >= self.cutoff:
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s', action_text, instance)
            else:
                action_text = 'would delete' if self.dry_run else 'deleting'
                self.logger.info('%s %s', action_text, instance)
                n_deleted_items += 1
                if not self.dry_run:
                    #pks_to_delete.add(instance.pk)
                    instance.delete()

            # Cleanup objects in batches instead of deleting each one individually.
            if len(pks_to_delete) >= 50:
                model.objects.filter(pk__in=pks_to_delete).delete()
                pks_to_delete.clear()
        if len(pks_to_delete):
            model.objects.filter(pk__in=pks_to_delete).delete()
        return n_deleted_items

    def init_logging(self):
        log_levels = dict(enumerate([logging.ERROR, logging.INFO,
                                     logging.DEBUG, 0]))
        self.logger = logging.getLogger('awx.main.commands.cleanup_deleted')
        self.logger.setLevel(log_levels.get(self.verbosity, 0))
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(handler)
        self.logger.propagate = False

    @transaction.atomic
    def handle(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))
        self.init_logging()
        self.days = int(options.get('days', 90))
        self.dry_run = bool(options.get('dry_run', False))
        # FIXME: Handle args to select models.
        self.cutoff = now() - datetime.timedelta(days=self.days)

        n_deleted_items = 0
        n_deleted_items += self.cleanup_model(User)
        for model in self.get_models(PrimordialModel):
            n_deleted_items += self.cleanup_model(model)

        if not self.dry_run:
            self.logger.log(99, "Removed %d items", n_deleted_items)
        else:
            self.logger.log(99, "Would have removed %d items", n_deleted_items)

        tokens_removed = AuthToken.objects.filter(expires__lt=now())
        if not self.dry_run:
            self.logger.log(99, "Removed %d expired auth tokens" % tokens_removed.count())
            tokens_removed.delete()
        else:
            self.logger.log(99, "Would have removed %d expired auth tokens" % tokens_removed.count())

@ -12,7 +12,7 @@ from django.db import transaction
from django.utils.timezone import now

# AWX
from awx.fact.models.fact import * # noqa
from awx.main.models.fact import Fact
from awx.api.license import feature_enabled

OLDER_THAN = 'older_than'
@ -31,7 +31,7 @@ class CleanupFacts(object):
    # pivot -= granularity
    # group by host
    def cleanup(self, older_than_abs, granularity, module=None):
        fact_oldest = FactVersion.objects.all().order_by('timestamp').first()
        fact_oldest = Fact.objects.all().order_by('timestamp').first()
        if not fact_oldest:
            return 0

@ -44,7 +44,10 @@ class CleanupFacts(object):
        # Special case, granularity=0x where x is d, w, or y
        # The intent is to delete all facts < older_than_abs
        if granularity == relativedelta():
            return FactVersion.objects.filter(**kv).order_by('-timestamp').delete()
            qs = Fact.objects.filter(**kv)
            count = qs.count()
            qs.delete()
            return count

        total = 0

@ -61,18 +64,17 @@ class CleanupFacts(object):
            kv['module'] = module

            fact_version_objs = FactVersion.objects.filter(**kv).order_by('-timestamp').limit(1)
            if fact_version_objs:
                fact_version_obj = fact_version_objs[0]
            fact_version_obj = Fact.objects.filter(**kv).order_by('-timestamp').first()
            if fact_version_obj:
                kv = {
                    'timestamp__lt': fact_version_obj.timestamp,
                    'timestamp__gt': date_pivot_next
                }
                if module:
                    kv['module'] = module
                count = FactVersion.objects.filter(**kv).delete()
                # FIXME: These two deletes should be a transaction
                count = Fact.objects.filter(**kv).delete()
                qs = Fact.objects.filter(**kv)
                count = qs.count()
                qs.delete()
                total += count

            date_pivot = date_pivot_next

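The count-then-delete pattern introduced above exists because QuerySet.delete() does not report a usable row count on the Django release this code targets, so the caller counts first and returns that. A minimal standalone sketch of the same pattern (the Fact import path comes from the hunk above; the helper name and kwargs are illustrative only):

    from awx.main.models.fact import Fact

    def delete_and_count(**kv):
        # Count before deleting; .delete() itself does not tell us how
        # many rows were removed on older Django releases.
        qs = Fact.objects.filter(**kv)
        count = qs.count()
        qs.delete()
        return count
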
@ -1,27 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from django.core.management.base import BaseCommand
from crum import impersonate
from awx.main.models import User, Organization


class Command(BaseCommand):
    """Creates the default organization if and only if no organizations
    exist in the system.
    """
    help = 'Creates a default organization iff there are none.'

    def handle(self, *args, **kwargs):
        # Sanity check: Is there already an organization in the system?
        if Organization.objects.count():
            return

        # Create a default organization as the first superuser found.
        try:
            superuser = User.objects.filter(is_superuser=True, is_active=True).order_by('pk')[0]
        except IndexError:
            superuser = None
        with impersonate(superuser):
            Organization.objects.create(name='Default')
        print('Default organization added.')

49
awx/main/management/commands/create_preload_data.py
Normal file
@ -0,0 +1,49 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from django.core.management.base import BaseCommand
from crum import impersonate
from awx.main.models import User, Organization, Project, Inventory, Credential, Host, JobTemplate


class Command(BaseCommand):
    """Create preloaded data, intended for new installs
    """
    help = 'Creates a preload tower data iff there is none.'

    def handle(self, *args, **kwargs):
        # Sanity check: Is there already an organization in the system?
        if Organization.objects.count():
            return

        # Create a default organization as the first superuser found.
        try:
            superuser = User.objects.filter(is_superuser=True).order_by('pk')[0]
        except IndexError:
            superuser = None
        with impersonate(superuser):
            o = Organization.objects.create(name='Default')
            p = Project(name='Demo Project',
                        scm_type='git',
                        scm_url='https://github.com/ansible/ansible-tower-samples',
                        scm_update_on_launch=True,
                        scm_update_cache_timeout=0,
                        organization=o)
            p.save(skip_update=True)
            c = Credential.objects.create(name='Demo Credential',
                                          username=superuser.username,
                                          created_by=superuser)
            c.admin_role.members.add(superuser)
            i = Inventory.objects.create(name='Demo Inventory',
                                         organization=o,
                                         created_by=superuser)
            Host.objects.create(name='localhost',
                                inventory=i,
                                variables="ansible_connection: local",
                                created_by=superuser)
            JobTemplate.objects.create(name='Demo Job Template',
                                       playbook='hello_world.yml',
                                       project=p,
                                       inventory=i,
                                       credential=c)
        print('Default organization added.')

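This new command is typically run once during setup, but it can also be invoked by hand. A minimal sketch of calling it through Django's management API (assumes an initialized AWX settings module; per the sanity check above it exits silently if any organization already exists):

    from django.core.management import call_command

    # Seeds the Default organization plus the Demo project/credential/
    # inventory/job template exactly once.
    call_command('create_preload_data')
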
@ -28,6 +28,7 @@ from awx.main.models import * # noqa
from awx.main.utils import ignore_inventory_computed_fields, check_proot_installed, wrap_args_with_proot
from awx.main.signals import disable_activity_stream
from awx.main.task_engine import TaskSerializer as LicenseReader
from awx.main.conf import tower_settings

logger = logging.getLogger('awx.main.commands.inventory_import')

@ -52,13 +53,13 @@ class MemObject(object):
    '''
    Common code shared between in-memory groups and hosts.
    '''

    def __init__(self, name, source_dir):
        assert name, 'no name'
        assert source_dir, 'no source dir'
        self.name = name
        self.source_dir = source_dir

    def load_vars(self, base_path):
        all_vars = {}
        files_found = 0
@ -77,7 +78,7 @@ class MemObject(object):
            v = yaml.safe_load(file(path, 'r').read())
            if hasattr(v, 'items'): # is a dict
                all_vars.update(v)
        except yaml.YAMLError, e:
        except yaml.YAMLError as e:
            if hasattr(e, 'problem_mark'):
                logger.error('Invalid YAML in %s:%s col %s', path,
                             e.problem_mark.line + 1,
@ -106,7 +107,7 @@ class MemGroup(MemObject):
        group_vars = os.path.join(source_dir, 'group_vars', self.name)
        self.variables = self.load_vars(group_vars)
        logger.debug('Loaded group: %s', self.name)

    def child_group_by_name(self, name, loader):
        if name == 'all':
            return
@ -265,7 +266,7 @@ class BaseLoader(object):
            logger.debug('Filtering group %s', name)
            return None
        if name not in self.all_group.all_groups:
            group = MemGroup(name, self.source_dir)
            if not child:
                all_group.add_child_group(group)
            self.all_group.all_groups[name] = group
@ -314,7 +315,7 @@ class IniLoader(BaseLoader):
                for t in tokens[1:]:
                    k,v = t.split('=', 1)
                    host.variables[k] = v
                group.add_host(host)
            elif input_mode == 'children':
                group.child_group_by_name(line, self)
            elif input_mode == 'vars':
@ -327,7 +328,7 @@ class IniLoader(BaseLoader):
# from API documentation:
#
# if called with --list, inventory outputs like so:
#
# {
#    "databases" : {
#        "hosts" : [ "host1.example.com", "host2.example.com" ],
@ -356,12 +357,24 @@ class ExecutableJsonLoader(BaseLoader):
        data = {}
        stdout, stderr = '', ''
        try:
            if self.is_custom and getattr(settings, 'AWX_PROOT_ENABLED', False):
            if self.is_custom and getattr(tower_settings, 'AWX_PROOT_ENABLED', False):
                if not check_proot_installed():
                    raise RuntimeError("proot is not installed but is configured for use")
                kwargs = {'proot_temp_dir': self.source_dir} # TODO: Remove proot dir
                cmd = wrap_args_with_proot(cmd, self.source_dir, **kwargs)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Use ansible venv if it's available and setup to use
            env = dict(os.environ.items())
            if settings.ANSIBLE_USE_VENV:
                env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
                env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
                venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
                env.pop('PYTHONPATH', None)  # default to none if no python_ver matches
                for python_ver in ["python2.7", "python2.6"]:
                    if os.path.isdir(os.path.join(venv_libdir, python_ver)):
                        env['PYTHONPATH'] = os.path.join(venv_libdir, python_ver, "site-packages") + ":"
                        break

            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
            stdout, stderr = proc.communicate()
            if proc.returncode != 0:
                raise RuntimeError('%r failed (rc=%d) with output: %s' % (cmd, proc.returncode, stderr))
@ -465,11 +478,9 @@ def load_inventory_source(source, all_group=None, group_filter_re=None,
    '''
    Load inventory from given source directory or file.
    '''
    # Sanity check: We need the "azure" module to be titled "windows_azure.py",
    # because it depends on the "azure" package from PyPI, and naming the
    # module the same way makes the importer sad.
    source = source.replace('azure', 'windows_azure')

    # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
    # good naming conventions
    source = source.replace('azure.py', 'windows_azure.py')
    logger.debug('Analyzing type of source: %s', source)
    original_all_group = all_group
    if not os.path.exists(source):
@ -580,7 +591,7 @@ class Command(NoArgsCommand):
    def _get_instance_id(self, from_dict, default=''):
        '''
        Retrieve the instance ID from the given dict of host variables.

        The instance ID variable may be specified as 'foo.bar', in which case
        the lookup will traverse into nested dicts, equivalent to:

@ -632,7 +643,7 @@ class Command(NoArgsCommand):
        else:
            q = dict(name=self.inventory_name)
        try:
            self.inventory = Inventory.objects.filter(active=True).get(**q)
            self.inventory = Inventory.objects.get(**q)
        except Inventory.DoesNotExist:
            raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
        except Inventory.MultipleObjectsReturned:
@ -647,8 +658,7 @@ class Command(NoArgsCommand):
        if inventory_source_id:
            try:
                self.inventory_source = InventorySource.objects.get(pk=inventory_source_id,
                                                                    inventory=self.inventory,
                                                                    active=True)
                                                                    inventory=self.inventory)
            except InventorySource.DoesNotExist:
                raise CommandError('Inventory source with id=%s not found' %
                                   inventory_source_id)
@ -668,7 +678,6 @@ class Command(NoArgsCommand):
                source_path=os.path.abspath(self.source),
                overwrite=self.overwrite,
                overwrite_vars=self.overwrite_vars,
                active=True,
            )
            self.inventory_update = self.inventory_source.create_inventory_update(
                job_args=json.dumps(sys.argv),
@ -702,7 +711,7 @@ class Command(NoArgsCommand):
            host_qs = self.inventory_source.group.all_hosts
        else:
            host_qs = self.inventory.hosts.all()
        host_qs = host_qs.filter(active=True, instance_id='',
        host_qs = host_qs.filter(instance_id='',
                                 variables__contains=self.instance_id_var.split('.')[0])
        for host in host_qs:
            instance_id = self._get_instance_id(host.variables_dict)
@ -739,7 +748,7 @@ class Command(NoArgsCommand):
            hosts_qs = self.inventory_source.group.all_hosts
            # FIXME: Also include hosts from inventory_source.managed_hosts?
        else:
            hosts_qs = self.inventory.hosts.filter(active=True)
            hosts_qs = self.inventory.hosts
        # Build list of all host pks, remove all that should not be deleted.
        del_host_pks = set(hosts_qs.values_list('pk', flat=True))
        if self.instance_id_var:
@ -764,7 +773,7 @@ class Command(NoArgsCommand):
            del_pks = all_del_pks[offset:(offset + self._batch_size)]
            for host in hosts_qs.filter(pk__in=del_pks):
                host_name = host.name
                host.mark_inactive()
                host.delete()
                self.logger.info('Deleted host "%s"', host_name)
        if settings.SQL_DEBUG:
            self.logger.warning('host deletions took %d queries for %d hosts',
@ -781,10 +790,10 @@ class Command(NoArgsCommand):
        if settings.SQL_DEBUG:
            queries_before = len(connection.queries)
        if self.inventory_source.group:
            groups_qs = self.inventory_source.group.all_children
            groups_qs = self.inventory_source.group.all_children.all()
            # FIXME: Also include groups from inventory_source.managed_groups?
        else:
            groups_qs = self.inventory.groups.filter(active=True)
            groups_qs = self.inventory.groups.all()
        # Build list of all group pks, remove those that should not be deleted.
        del_group_pks = set(groups_qs.values_list('pk', flat=True))
        all_group_names = self.all_group.all_groups.keys()
@ -798,7 +807,8 @@ class Command(NoArgsCommand):
            del_pks = all_del_pks[offset:(offset + self._batch_size)]
            for group in groups_qs.filter(pk__in=del_pks):
                group_name = group.name
                group.mark_inactive(recompute=False)
                with ignore_inventory_computed_fields():
                    group.delete()
                self.logger.info('Group "%s" deleted', group_name)
        if settings.SQL_DEBUG:
            self.logger.warning('group deletions took %d queries for %d groups',
@ -820,10 +830,10 @@ class Command(NoArgsCommand):
        if self.inventory_source.group:
            db_groups = self.inventory_source.group.all_children
        else:
            db_groups = self.inventory.groups.filter(active=True)
        for db_group in db_groups:
            db_groups = self.inventory.groups
        for db_group in db_groups.all():
            # Delete child group relationships not present in imported data.
            db_children = db_group.children.filter(active=True)
            db_children = db_group.children
            db_children_name_pk_map = dict(db_children.values_list('name', 'pk'))
            mem_children = self.all_group.all_groups[db_group.name].children
            for mem_group in mem_children:
@ -838,7 +848,7 @@ class Command(NoArgsCommand):
                                 db_child.name, db_group.name)
            # FIXME: Inventory source group relationships
            # Delete group/host relationships not present in imported data.
            db_hosts = db_group.hosts.filter(active=True)
            db_hosts = db_group.hosts
            del_host_pks = set(db_hosts.values_list('pk', flat=True))
            mem_hosts = self.all_group.all_groups[db_group.name].hosts
            all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
@ -859,7 +869,7 @@ class Command(NoArgsCommand):
                del_pks = del_host_pks[offset:(offset + self._batch_size)]
                for db_host in db_hosts.filter(pk__in=del_pks):
                    group_host_count += 1
                    if db_host not in db_group.hosts.filter(active=True):
                    if db_host not in db_group.hosts.all():
                        continue
                    db_group.hosts.remove(db_host)
                    self.logger.info('Host "%s" removed from group "%s"',
@ -1035,7 +1045,7 @@ class Command(NoArgsCommand):
        all_host_pks = sorted(mem_host_pk_map.keys())
        for offset in xrange(0, len(all_host_pks), self._batch_size):
            host_pks = all_host_pks[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(active=True, pk__in=host_pks):
            for db_host in self.inventory.hosts.filter(pk__in=host_pks):
                if db_host.pk in host_pks_updated:
                    continue
                mem_host = mem_host_pk_map[db_host.pk]
@ -1047,7 +1057,7 @@ class Command(NoArgsCommand):
        all_instance_ids = sorted(mem_host_instance_id_map.keys())
        for offset in xrange(0, len(all_instance_ids), self._batch_size):
            instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(active=True, instance_id__in=instance_ids):
            for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
                if db_host.pk in host_pks_updated:
                    continue
                mem_host = mem_host_instance_id_map[db_host.instance_id]
@ -1059,7 +1069,7 @@ class Command(NoArgsCommand):
        all_host_names = sorted(mem_host_name_map.keys())
        for offset in xrange(0, len(all_host_names), self._batch_size):
            host_names = all_host_names[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(active=True, name__in=host_names):
            for db_host in self.inventory.hosts.filter(name__in=host_names):
                if db_host.pk in host_pks_updated:
                    continue
                mem_host = mem_host_name_map[db_host.name]
@ -1109,13 +1119,14 @@ class Command(NoArgsCommand):
            for db_group in self.inventory.groups.filter(name__in=group_names):
                mem_group = self.all_group.all_groups[db_group.name]
                group_group_count += len(mem_group.children)
                child_names = set([g.name for g in mem_group.children])
                db_children_qs = self.inventory.groups.filter(name__in=child_names)
                # FIXME: May fail unit tests when len(child_names) > 1000.
                for db_child in db_children_qs.filter(children__id=db_group.id):
                    self.logger.info('Group "%s" already child of group "%s"', db_child.name, db_group.name)
                for db_child in db_children_qs.exclude(children__id=db_group.id):
                    self._batch_add_m2m(db_group.children, db_child)
                all_child_names = sorted([g.name for g in mem_group.children])
                for offset2 in xrange(0, len(all_child_names), self._batch_size):
                    child_names = all_child_names[offset2:(offset2 + self._batch_size)]
                    db_children_qs = self.inventory.groups.filter(name__in=child_names)
                    for db_child in db_children_qs.filter(children__id=db_group.id):
                        self.logger.info('Group "%s" already child of group "%s"', db_child.name, db_group.name)
                    for db_child in db_children_qs.exclude(children__id=db_group.id):
                        self._batch_add_m2m(db_group.children, db_child)
                        self.logger.info('Group "%s" added as child of "%s"', db_child.name, db_group.name)
                self._batch_add_m2m(db_group.children, flush=True)
        if settings.SQL_DEBUG:
@ -1179,7 +1190,7 @@ class Command(NoArgsCommand):

    def check_license(self):
        reader = LicenseReader()
        license_info = reader.from_file()
        license_info = reader.from_database()
        if not license_info or len(license_info) == 0:
            self.logger.error(LICENSE_NON_EXISTANT_MESSAGE)
            raise CommandError('No Tower license found!')
@ -1272,42 +1283,43 @@ class Command(NoArgsCommand):
                                     self.is_custom)
            self.all_group.debug_tree()

            # Ensure that this is managed as an atomic SQL transaction,
            # and thus properly rolled back if there is an issue.
            with transaction.atomic():
                # Merge/overwrite inventory into database.
                if settings.SQL_DEBUG:
                    self.logger.warning('loading into database...')
                with ignore_inventory_computed_fields():
                    if getattr(settings, 'ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC', True):
                        self.load_into_database()
                    else:
                        with disable_activity_stream():
            with batch_role_ancestor_rebuilding():
                # Ensure that this is managed as an atomic SQL transaction,
                # and thus properly rolled back if there is an issue.
                with transaction.atomic():
                    # Merge/overwrite inventory into database.
                    if settings.SQL_DEBUG:
                        self.logger.warning('loading into database...')
                    with ignore_inventory_computed_fields():
                        if getattr(settings, 'ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC', True):
                            self.load_into_database()
                        else:
                            with disable_activity_stream():
                                self.load_into_database()
                if settings.SQL_DEBUG:
                    queries_before2 = len(connection.queries)
                self.inventory.update_computed_fields()
                if settings.SQL_DEBUG:
                    self.logger.warning('update computed fields took %d queries',
                                        len(connection.queries) - queries_before2)
            try:
                self.check_license()
            except CommandError as e:
                self.mark_license_failure(save=True)
                raise e

            if self.inventory_source.group:
                inv_name = 'group "%s"' % (self.inventory_source.group.name)
            else:
                inv_name = '"%s" (id=%s)' % (self.inventory.name,
                                             self.inventory.id)
                    if settings.SQL_DEBUG:
                        queries_before2 = len(connection.queries)
                    self.inventory.update_computed_fields()
                    if settings.SQL_DEBUG:
                        self.logger.warning('update computed fields took %d queries',
                                            len(connection.queries) - queries_before2)
                try:
                    self.check_license()
                except CommandError as e:
                    self.mark_license_failure(save=True)
                    raise e

                if self.inventory_source.group:
                    inv_name = 'group "%s"' % (self.inventory_source.group.name)
                else:
                    inv_name = '"%s" (id=%s)' % (self.inventory.name,
                                                 self.inventory.id)
            if settings.SQL_DEBUG:
                self.logger.warning('Inventory import completed for %s in %0.1fs',
                                    inv_name, time.time() - begin)
            else:
                self.logger.info('Inventory import completed for %s in %0.1fs',
                                 inv_name, time.time() - begin)
            status = 'successful'
                if settings.SQL_DEBUG:
                    self.logger.warning('Inventory import completed for %s in %0.1fs',
                                        inv_name, time.time() - begin)
                else:
                    self.logger.info('Inventory import completed for %s in %0.1fs',
                                     inv_name, time.time() - begin)
                status = 'successful'

            # If we're in debug mode, then log the queries and time
            # used to do the operation.
@ -1317,7 +1329,7 @@ class Command(NoArgsCommand):
                self.logger.warning('Inventory import required %d queries '
                                    'taking %0.3fs', len(queries_this_import),
                                    sqltime)
        except Exception, e:
        except Exception as e:
            if isinstance(e, KeyboardInterrupt):
                status = 'canceled'
                exc = e
@ -1334,7 +1346,7 @@ class Command(NoArgsCommand):
            self.inventory_update.result_traceback = tb
            self.inventory_update.status = status
            self.inventory_update.save(update_fields=['status', 'result_traceback'])

        if exc and isinstance(exc, CommandError):
            sys.exit(1)
        elif exc:

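For reference, a hedged sketch of driving the importer from code rather than the CLI. The option names mirror the attributes referenced in the hunks above (self.inventory_name, self.source, self.overwrite), but the full option list is not shown in this diff, so treat them as assumptions:

    from django.core.management import call_command

    # Import a directory or inventory-script output into inventory 1,
    # removing hosts/groups that are no longer present in the source.
    call_command('inventory_import',
                 inventory_id=1,
                 source='/path/to/inventory',
                 overwrite=True)
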
@ -16,7 +16,7 @@ from django.conf import settings
from django.core.management.base import NoArgsCommand
from django.db import transaction, DatabaseError
from django.utils.dateparse import parse_datetime
from django.utils.tzinfo import FixedOffset
from django.utils.timezone import FixedOffset
from django.db import connection

# AWX
@ -173,7 +173,7 @@ class CallbackReceiver(object):
        # If for any reason there's a problem, just use 0.
        try:
            verbose = Job.objects.get(id=data['job_id']).verbosity
        except Exception, e:
        except Exception as e:
            verbose = 0

        # Convert the datetime for the job event's creation appropriately,
@ -191,7 +191,7 @@ class CallbackReceiver(object):

        # Print the data to stdout if we're in DEBUG mode.
        if settings.DEBUG:
            print data
            print(data)

        # Sanity check: Don't honor keys that we don't recognize.
        for key in data.keys():
@ -234,7 +234,7 @@ class CallbackReceiver(object):
        # If for any reason there's a problem, just use 0.
        try:
            verbose = AdHocCommand.objects.get(id=data['ad_hoc_command_id']).verbosity
        except Exception, e:
        except Exception as e:
            verbose = 0

        # Convert the datetime for the job event's creation appropriately,
@ -252,7 +252,7 @@ class CallbackReceiver(object):

        # Print the data to stdout if we're in DEBUG mode.
        if settings.DEBUG:
            print data
            print(data)

        # Sanity check: Don't honor keys that we don't recognize.
        for key in data.keys():
@ -288,7 +288,7 @@ class CallbackReceiver(object):
                    message = queue_actual.get(block=True, timeout=1)
                except QueueEmpty:
                    continue
                except Exception, e:
                except Exception as e:
                    logger.error("Exception on listen socket, restarting: " + str(e))
                    break
                self.process_job_event(message)

@ -9,9 +9,11 @@ from datetime import datetime
# Django
from django.core.management.base import NoArgsCommand
from django.conf import settings
from django.utils import timezone

# AWX
from awx.fact.models.fact import * # noqa
from awx.main.models.fact import Fact
from awx.main.models.inventory import Host
from awx.main.socket import Socket

logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
@ -47,35 +49,34 @@ class FactCacheReceiver(object):
        # ansible v2 will not emit this message. Thus, this can be removed at that time.
        if 'module_setup' in facts_data and len(facts_data) == 1:
            logger.info('Received module_setup message')
            return
            return None

        try:
            host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
        except FactHost.DoesNotExist:
            logger.info('Creating new host <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
            host = FactHost(hostname=hostname, inventory_id=inventory_id)
            host.save()
            logger.info('Created new host <%s>' % (host.id))
        except FactHost.MultipleObjectsReturned:
            query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
            logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
            host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
        except Fact.DoesNotExist:
            logger.warn('Failed to intake fact. Host does not exist <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
            return
        except Exception, e:
        except Fact.MultipleObjectsReturned:
            logger.warn('Database inconsistent. Multiple Hosts found for <hostname, inventory_id> <%s, %s>.' % (hostname, inventory_id))
            return None
        except Exception as e:
            logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
            return
            return None

        (module, facts) = self.process_facts(facts_data)
        self.timestamp = datetime.fromtimestamp(date_key, None)
        (module_name, facts) = self.process_facts(facts_data)
        self.timestamp = datetime.fromtimestamp(date_key, timezone.utc)

        # Update existing Fact entry
        try:
            # Update existing Fact entry
            version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
            Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
            logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
        except FactVersion.DoesNotExist:
            fact_obj = Fact.objects.get(host__id=host_obj.id, module=module_name, timestamp=self.timestamp)
            fact_obj.facts = facts
            fact_obj.save()
            logger.info('Updated existing fact <%s>' % (fact_obj.id))
        except Fact.DoesNotExist:
            # Create new Fact entry
            (fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
            logger.info('Created new fact <fact, fact_version> <%s, %s>' % (fact_obj.id, version_obj.id))
            fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
            logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
        return fact_obj

    def run_receiver(self, use_processing_threads=True):
        with Socket('fact_cache', 'r') as facts:

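The switch above from datetime.fromtimestamp(date_key, None) to datetime.fromtimestamp(date_key, timezone.utc) matters because passing a tzinfo produces an aware datetime, which Django expects for DateTimeField values when USE_TZ is enabled. A standalone illustration (the literal timestamp is arbitrary):

    from datetime import datetime
    from django.utils import timezone

    naive = datetime.fromtimestamp(1451606400)                # tzinfo is None
    aware = datetime.fromtimestamp(1451606400, timezone.utc)  # safe to store when USE_TZ=True
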
@ -96,7 +96,7 @@ class SocketController(object):
        if socket_session and socket_session.is_valid():
            try:
                socket.send_packet(packet)
            except Exception, e:
            except Exception as e:
                logger.error("Error sending client packet to %s: %s" % (str(session_id), str(packet)))
                logger.error("Error was: " + str(e))

@ -116,7 +116,7 @@ class SocketController(object):
        if socket:
            try:
                socket.send_packet(packet)
            except Exception, e:
            except Exception as e:
                logger.error("Error sending client packet to %s: %s" % (str(socket_session.session_id), str(packet)))
                logger.error("Error was: " + str(e))

@ -129,18 +129,18 @@ socketController = SocketController(SocketSessionManager())
#
# Socket session is attached to self.session['socket_session']
# self.session and self.socket.session point to the same dict
#
class TowerBaseNamespace(BaseNamespace):

    def get_allowed_methods(self):
        return ['recv_disconnect']

    def get_initial_acl(self):
        request_token = self._get_request_token()
        if request_token:
            # (1) This is the first time the socket has been seen (first
            # namespace joined).
            # (2) This socket has already been seen (already joined and maybe
            # left a namespace)
            #
            # Note: Assume that the user token is valid if the session is found
@ -168,7 +168,7 @@ class TowerBaseNamespace(BaseNamespace):
                if k == "Token":
                    token_actual = urllib.unquote_plus(v).decode().replace("\"","")
                    return token_actual
        except Exception, e:
        except Exception as e:
            logger.error("Exception validating user: " + str(e))
            return False
        return False

@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand
# AWX
from awx.main.models import * # noqa
from awx.main.queue import FifoQueue
from awx.main.tasks import handle_work_error
from awx.main.tasks import handle_work_error, handle_work_success
from awx.main.utils import get_system_task_capacity

# Celery
@ -108,6 +108,8 @@ class SimpleDAG(object):
            return "inventory_update"
        elif type(obj) == ProjectUpdate:
            return "project_update"
        elif type(obj) == SystemJob:
            return "system_job"
        return "unknown"

    def get_dependencies(self, obj):
@ -205,7 +207,15 @@ def rebuild_graph(message):
    # Create and process dependencies for new tasks
    for task in new_tasks:
        logger.debug("Checking dependencies for: %s" % str(task))
        task_dependencies = task.generate_dependencies(running_tasks + waiting_tasks) # TODO: other 'new' tasks? Need to investigate this scenario
        try:
            task_dependencies = task.generate_dependencies(running_tasks + waiting_tasks)
        except Exception, e:
            logger.error("Failed processing dependencies for {}: {}".format(task, e))
            task.status = 'failed'
            task.job_explanation += 'Task failed to generate dependencies: {}'.format(e)
            task.save()
            task.socketio_emit_status("failed")
            continue
        logger.debug("New dependencies: %s" % str(task_dependencies))
        for dep in task_dependencies:
            # We recalculate the created time for the moment to ensure the
@ -265,14 +275,15 @@ def process_graph(graph, task_capacity):
            [{'type': graph.get_node_type(n['node_object']),
              'id': n['node_object'].id} for n in node_dependencies]
        error_handler = handle_work_error.s(subtasks=dependent_nodes)
        start_status = node_obj.start(error_callback=error_handler)
        success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj),
                                                             'id': node_obj.id})
        start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler)
        if not start_status:
            node_obj.status = 'failed'
            if node_obj.job_explanation:
                node_obj.job_explanation += ' '
            node_obj.job_explanation += 'Task failed pre-start check.'
            node_obj.save()
            # TODO: Run error handler
            continue
        remaining_volume -= impact
        running_impact += impact

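Both callbacks in the hunk above use Celery's signature API: handle_work_error.s(...) and handle_work_success.s(...) build partially-applied task signatures that can be invoked later, when the started job finishes or fails. A minimal sketch of the same idea with a generic task (the task name here is illustrative, not from this file):

    from celery import shared_task

    @shared_task
    def notify(result, subject):
        print(subject, result)

    sig = notify.s(subject='job finished')  # partial signature; 'result' is filled in later
    sig.delay('ok')                         # equivalent to notify.delay('ok', subject='job finished')
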
45
awx/main/management/commands/update_password.py
Normal file
@ -0,0 +1,45 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved

# Python
from optparse import make_option

# Django
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.contrib.auth.models import User

class UpdatePassword(object):
    def update_password(self, username, password):
        changed = False
        u = User.objects.get(username=username)
        if not u:
            raise RuntimeError("User not found")
        check = u.check_password(password)
        if not check:
            u.set_password(password)
            u.save()
            changed = True
        return changed

class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--username', dest='username', action='store', type='string', default=None,
                    help='username to change the password for'),
        make_option('--password', dest='password', action='store', type='string', default=None,
                    help='new password for user'),
    )

    def handle(self, *args, **options):
        if not options['username']:
            raise CommandError('username required')
        if not options['password']:
            raise CommandError('password required')

        cp = UpdatePassword()
        res = cp.update_password(options['username'], options['password'])
        if res:
            return "Password updated"
        return "Password not updated"

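A hedged usage sketch for this new command (assumes an initialized Django environment; the option names come straight from the option_list above). Note that User.objects.get() raises User.DoesNotExist for an unknown username rather than returning None, so callers should be prepared to catch that exception:

    from django.core.management import call_command

    # Reports "Password updated" only when the new password differs from
    # the stored hash; otherwise the user record is left untouched.
    call_command('update_password', username='admin', password='new-secret')
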
@ -13,9 +13,9 @@ class HostManager(models.Manager):
    def active_count(self):
        """Return count of active, unique hosts for licensing."""
        try:
            return self.filter(active=True, inventory__active=True).distinct('name').count()
            return self.order_by('name').distinct('name').count()
        except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
            return len(set(self.filter(active=True, inventory__active=True).values_list('name', flat=True)))
            return len(set(self.values_list('name', flat=True)))

class InstanceManager(models.Manager):
    """A custom manager class for the Instance model.

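The rewritten active_count pairs order_by('name') with distinct('name') because PostgreSQL's DISTINCT ON requires the leading ORDER BY expression to match the distinct field, while SQLite has no DISTINCT ON at all, hence the NotImplementedError fallback. The same counting logic in isolation (the Host import path is an assumption for illustration):

    from awx.main.models import Host

    def unique_host_count():
        try:
            # PostgreSQL: DISTINCT ON ("name"); needs a matching ORDER BY.
            return Host.objects.order_by('name').distinct('name').count()
        except NotImplementedError:
            # SQLite fallback: deduplicate in Python instead.
            return len(set(Host.objects.values_list('name', flat=True)))
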
@ -11,10 +11,10 @@ from django.db import IntegrityError
from django.http import HttpResponseRedirect
from django.template.response import TemplateResponse
from django.utils.functional import curry
from django.conf import settings

from awx import __version__ as version
from awx.main.models import ActivityStream, Instance
from awx.main.conf import tower_settings
from awx.api.authentication import TokenAuthentication

@ -117,6 +117,6 @@ class AuthTokenTimeoutMiddleware(object):
        if not TokenAuthentication._get_x_auth_token_header(request):
            return response

        response['Auth-Token-Timeout'] = int(settings.AUTH_TOKEN_EXPIRATION)
        response['Auth-Token-Timeout'] = int(tower_settings.AUTH_TOKEN_EXPIRATION)
        return response

991
awx/main/migrations/0001_initial.py
Normal file
@ -0,0 +1,991 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone
import jsonfield.fields
import django.db.models.deletion
from django.conf import settings
import taggit.managers
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ActivityStream',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('changes', models.TextField(blank=True)),
                ('object_relationship_type', models.TextField(blank=True)),
                ('object1', models.TextField()),
                ('object2', models.TextField()),
                ('actor', models.ForeignKey(related_name='activity_stream', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='AdHocCommandEvent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
                ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
                ('failed', models.BooleanField(default=False, editable=False)),
                ('changed', models.BooleanField(default=False, editable=False)),
                ('counter', models.PositiveIntegerField(default=0)),
            ],
            options={
                'ordering': ('-pk',),
            },
        ),
        migrations.CreateModel(
            name='AuthToken',
            fields=[
                ('key', models.CharField(max_length=40, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('expires', models.DateTimeField(default=django.utils.timezone.now)),
                ('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
                ('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
                ('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Credential',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('active', models.BooleanField(default=True, editable=False)),
                ('name', models.CharField(max_length=512)),
                ('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])),
                ('cloud', models.BooleanField(default=False, editable=False)),
                ('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
                ('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
                ('password', models.CharField(default=b'', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
                ('security_token', models.CharField(default=b'', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
                ('project', models.CharField(default=b'', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
                ('ssh_key_data', models.TextField(default=b'', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
                ('ssh_key_unlock', models.CharField(default=b'', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
                ('become_method', models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec')])),
                ('become_username', models.CharField(default=b'', help_text='Privilege escalation username.', max_length=1024, blank=True)),
                ('become_password', models.CharField(default=b'', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
                ('vault_password', models.CharField(default=b'', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
            ],
            options={
                'ordering': ('kind', 'name'),
            },
        ),
        migrations.CreateModel(
            name='CustomInventoryScript',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('active', models.BooleanField(default=True, editable=False)),
                ('name', models.CharField(max_length=512)),
                ('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('active', models.BooleanField(default=True, editable=False)),
                ('name', models.CharField(max_length=512)),
                ('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)),
                ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
                ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
                ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
                ('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of child groups contained within this group.', editable=False)),
                ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of child groups within this group that have active failures.', editable=False)),
                ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this group was created/updated from any external inventory sources.', editable=False)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='Host',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('active', models.BooleanField(default=True, editable=False)),
                ('name', models.CharField(max_length=512)),
                ('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
                ('instance_id', models.CharField(default=b'', max_length=100, blank=True)),
                ('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)),
                ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
                ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('inventory', 'name'),
            },
        ),
        migrations.CreateModel(
            name='Instance',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('uuid', models.CharField(unique=True, max_length=40)),
                ('hostname', models.CharField(unique=True, max_length=250)),
                ('primary', models.BooleanField(default=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Inventory',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('active', models.BooleanField(default=True, editable=False)),
                ('name', models.CharField(unique=True, max_length=512)),
                ('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)),
                ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
                ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
                ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
                ('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of groups in this inventory.', editable=False)),
                ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of groups in this inventory with active failures.', editable=False)),
                ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this inventory has any external inventory sources.', editable=False)),
                ('total_inventory_sources', models.PositiveIntegerField(default=0, help_text='Total number of external inventory sources configured within this inventory.', editable=False)),
                ('inventory_sources_with_failures', models.PositiveIntegerField(default=0, help_text='Number of external inventory sources in this inventory with failures.', editable=False)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('modified_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('name',),
                'verbose_name_plural': 'inventories',
            },
        ),
        migrations.CreateModel(
            name='JobEvent',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])),
                ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
                ('failed', models.BooleanField(default=False, editable=False)),
                ('changed', models.BooleanField(default=False, editable=False)),
                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
                ('play', models.CharField(default=b'', max_length=1024, editable=False)),
                ('role', models.CharField(default=b'', max_length=1024, editable=False)),
                ('task', models.CharField(default=b'', max_length=1024, editable=False)),
                ('counter', models.PositiveIntegerField(default=0)),
                ('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
                ('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
                ('parent', models.ForeignKey(related_name='children', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.JobEvent', null=True)),
            ],
            options={
                'ordering': ('pk',),
            },
        ),
        migrations.CreateModel(
            name='JobHostSummary',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
                ('changed', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('dark', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('failures', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('ok', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('processed', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('skipped', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('host', models.ForeignKey(related_name='job_host_summaries', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('-pk',),
|
||||
'verbose_name_plural': 'job host summaries',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='JobOrigin',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('modified', models.DateTimeField(auto_now=True)),
|
||||
('instance', models.ForeignKey(to='main.Instance')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Organization',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
('users', models.ManyToManyField(related_name='organizations', to=settings.AUTH_USER_MODEL, blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('name',),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Permission',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])),
|
||||
('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Profile',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('ldap_dn', models.CharField(default=b'', max_length=1024)),
|
||||
('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Schedule',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
('dtstart', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('dtend', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('rrule', models.CharField(max_length=255)),
|
||||
('next_run', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('extra_data', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['-next_run'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Team',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('organization', models.ForeignKey(related_name='teams', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
('users', models.ManyToManyField(related_name='teams', to=settings.AUTH_USER_MODEL, blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('organization__name', 'name'),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='UnifiedJob',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
|
||||
('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])),
|
||||
('cancel_flag', models.BooleanField(default=False, editable=False)),
|
||||
('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('started', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('finished', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)),
|
||||
('job_args', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)),
|
||||
('job_explanation', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('start_args', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_traceback', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='UnifiedJobTemplate',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
|
||||
('last_job_failed', models.BooleanField(default=False, editable=False)),
|
||||
('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('has_schedules', models.BooleanField(default=False, editable=False)),
|
||||
('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AdHocCommand',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')])),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('module_name', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('module_args', models.TextField(default=b'', blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
bases=('main.unifiedjob',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='InventorySource',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('update_on_launch', models.BooleanField(default=False)),
|
||||
('update_cache_timeout', models.PositiveIntegerField(default=0)),
|
||||
],
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='InventoryUpdate',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('license_error', models.BooleanField(default=False, editable=False)),
|
||||
],
|
||||
bases=('main.unifiedjob', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Job',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('id',),
|
||||
},
|
||||
bases=('main.unifiedjob', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='JobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('ask_variables_on_launch', models.BooleanField(default=False)),
|
||||
('survey_enabled', models.BooleanField(default=False)),
|
||||
('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('name',),
|
||||
},
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Project',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
|
||||
('scm_update_on_launch', models.BooleanField(default=False)),
|
||||
('scm_update_cache_timeout', models.PositiveIntegerField(default=0, blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('id',),
|
||||
},
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ProjectUpdate',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
],
|
||||
bases=('main.unifiedjob', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SystemJob',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('id',),
|
||||
},
|
||||
bases=('main.unifiedjob', models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SystemJobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
],
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='current_job',
|
||||
field=models.ForeignKey(related_name='unifiedjobtemplate_as_current_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='last_job',
|
||||
field=models.ForeignKey(related_name='unifiedjobtemplate_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='next_schedule',
|
||||
field=models.ForeignKey(related_name='unifiedjobtemplate_as_next_schedule+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Schedule', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='polymorphic_ctype',
|
||||
field=models.ForeignKey(related_name='polymorphic_main.unifiedjobtemplate_set+', editable=False, to='contenttypes.ContentType', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='dependent_jobs',
|
||||
field=models.ManyToManyField(related_name='_unifiedjob_dependent_jobs_+', editable=False, to='main.UnifiedJob'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='polymorphic_ctype',
|
||||
field=models.ForeignKey(related_name='polymorphic_main.unifiedjob_set+', editable=False, to='contenttypes.ContentType', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='schedule',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Schedule', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='unified_job_template',
|
||||
field=models.ForeignKey(related_name='unifiedjob_unified_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJobTemplate', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='schedule',
|
||||
name='unified_job_template',
|
||||
field=models.ForeignKey(related_name='schedules', to='main.UnifiedJobTemplate'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='permission',
|
||||
name='team',
|
||||
field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Team', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='permission',
|
||||
name='user',
|
||||
field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='joborigin',
|
||||
name='unified_job',
|
||||
field=models.OneToOneField(related_name='job_origin', to='main.UnifiedJob'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventory',
|
||||
name='organization',
|
||||
field=models.ForeignKey(related_name='inventories', to='main.Organization', help_text='Organization containing this inventory.'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventory',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='hosts', to='main.Inventory'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='last_job_host_summary',
|
||||
field=models.ForeignKey(related_name='hosts_as_last_job_summary+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, editable=False, to='main.JobHostSummary', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='hosts',
|
||||
field=models.ManyToManyField(help_text='Hosts associated directly with this group.', related_name='groups', to='main.Host', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='groups', to='main.Inventory'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='parents',
|
||||
field=models.ManyToManyField(related_name='children', to='main.Group', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='custominventoryscript',
|
||||
name='organization',
|
||||
field=models.ForeignKey(related_name='custom_inventory_scripts', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', help_text='Organization owning this inventory script', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='custominventoryscript',
|
||||
name='tags',
|
||||
field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='credential',
|
||||
name='team',
|
||||
field=models.ForeignKey(related_name='credentials', default=None, blank=True, to='main.Team', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='credential',
|
||||
name='user',
|
||||
field=models.ForeignKey(related_name='credentials', default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommandevent',
|
||||
name='host',
|
||||
field=models.ForeignKey(related_name='ad_hoc_command_events', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='credential',
|
||||
field=models.ManyToManyField(to='main.Credential', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='custom_inventory_script',
|
||||
field=models.ManyToManyField(to='main.CustomInventoryScript', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='group',
|
||||
field=models.ManyToManyField(to='main.Group', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='host',
|
||||
field=models.ManyToManyField(to='main.Host', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='inventory',
|
||||
field=models.ManyToManyField(to='main.Inventory', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='organization',
|
||||
field=models.ManyToManyField(to='main.Organization', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='permission',
|
||||
field=models.ManyToManyField(to='main.Permission', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='schedule',
|
||||
field=models.ManyToManyField(to='main.Schedule', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='team',
|
||||
field=models.ManyToManyField(to='main.Team', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='unified_job',
|
||||
field=models.ManyToManyField(related_name='_activitystream_unified_job_+', to='main.UnifiedJob', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='unified_job_template',
|
||||
field=models.ManyToManyField(related_name='_activitystream_unified_job_template_+', to='main.UnifiedJobTemplate', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='user',
|
||||
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='unifiedjobtemplate',
|
||||
unique_together=set([('polymorphic_ctype', 'name')]),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='team',
|
||||
name='projects',
|
||||
field=models.ManyToManyField(related_name='teams', to='main.Project', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='systemjob',
|
||||
name='system_job_template',
|
||||
field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.SystemJobTemplate', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='projectupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='project',
|
||||
field=models.ForeignKey(related_name='project_updates', editable=False, to='main.Project'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='project',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='projects', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='permission',
|
||||
name='project',
|
||||
field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Project', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='projects',
|
||||
field=models.ManyToManyField(related_name='organizations', to='main.Project', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='cloud_credential',
|
||||
field=models.ForeignKey(related_name='jobtemplates_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='project',
|
||||
field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobhostsummary',
|
||||
name='job',
|
||||
field=models.ForeignKey(related_name='job_host_summaries', editable=False, to='main.Job'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobevent',
|
||||
name='job',
|
||||
field=models.ForeignKey(related_name='job_events', editable=False, to='main.Job'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='cloud_credential',
|
||||
field=models.ForeignKey(related_name='jobs_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='hosts',
|
||||
field=models.ManyToManyField(related_name='jobs', editable=False, through='main.JobHostSummary', to='main.Host'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='job_template',
|
||||
field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.JobTemplate', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='project',
|
||||
field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryupdate',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='inventoryupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryupdate',
|
||||
name='inventory_source',
|
||||
field=models.ForeignKey(related_name='inventory_updates', editable=False, to='main.InventorySource'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryupdate',
|
||||
name='source_script',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.CustomInventoryScript', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventorysource',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='inventorysources', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventorysource',
|
||||
name='group',
|
||||
field=awx.main.fields.AutoOneToOneField(related_name='inventory_source', null=True, default=None, editable=False, to='main.Group'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventorysource',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='inventory_sources', default=None, editable=False, to='main.Inventory', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventorysource',
|
||||
name='source_script',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.CustomInventoryScript', null=True),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='inventory',
|
||||
unique_together=set([('name', 'organization')]),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='inventory_sources',
|
||||
field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this host.', related_name='hosts', editable=False, to='main.InventorySource'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='host',
|
||||
name='last_job',
|
||||
field=models.ForeignKey(related_name='hosts_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Job', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='inventory_sources',
|
||||
field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this group.', related_name='groups', editable=False, to='main.InventorySource'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='custominventoryscript',
|
||||
unique_together=set([('name', 'organization')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='credential',
|
||||
unique_together=set([('user', 'team', 'kind', 'name')]),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommandevent',
|
||||
name='ad_hoc_command',
|
||||
field=models.ForeignKey(related_name='ad_hoc_command_events', editable=False, to='main.AdHocCommand'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommand',
|
||||
name='credential',
|
||||
field=models.ForeignKey(related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.Credential', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommand',
|
||||
name='hosts',
|
||||
field=models.ManyToManyField(related_name='ad_hoc_commands', editable=False, through='main.AdHocCommandEvent', to='main.Host'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommand',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='ad_hoc_command',
|
||||
field=models.ManyToManyField(to='main.AdHocCommand', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='inventory_source',
|
||||
field=models.ManyToManyField(to='main.InventorySource', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='inventory_update',
|
||||
field=models.ManyToManyField(to='main.InventoryUpdate', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='job',
|
||||
field=models.ManyToManyField(to='main.Job', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='job_template',
|
||||
field=models.ManyToManyField(to='main.JobTemplate', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='project',
|
||||
field=models.ManyToManyField(to='main.Project', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='project_update',
|
||||
field=models.ManyToManyField(to='main.ProjectUpdate', blank=True),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='team',
|
||||
unique_together=set([('organization', 'name')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='jobhostsummary',
|
||||
unique_together=set([('job', 'host_name')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='host',
|
||||
unique_together=set([('name', 'inventory')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='group',
|
||||
unique_together=set([('name', 'inventory')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='adhoccommandevent',
|
||||
unique_together=set([('ad_hoc_command', 'host_name')]),
|
||||
),
|
||||
]
|
||||
34
awx/main/migrations/0002_v300_tower_settings_changes.py
Normal file
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='TowerSettings',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('key', models.CharField(unique=True, max_length=255)),
                ('description', models.TextField()),
                ('category', models.CharField(max_length=128)),
                ('value', models.TextField(blank=True)),
                ('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])),
                ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
            ],
        ),
    ]
104
awx/main/migrations/0003_v300_notification_changes.py
Normal file
@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import jsonfield.fields
import django.db.models.deletion
from django.conf import settings
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0002_v300_tower_settings_changes'),
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
                ('error', models.TextField(default=b'', editable=False, blank=True)),
                ('notifications_sent', models.IntegerField(default=0, editable=False)),
                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
                ('recipients', models.TextField(default=b'', editable=False, blank=True)),
                ('subject', models.TextField(default=b'', editable=False, blank=True)),
                ('body', jsonfield.fields.JSONField(default=dict, blank=True)),
            ],
            options={
                'ordering': ('pk',),
            },
        ),
        migrations.CreateModel(
            name='NotificationTemplate',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('name', models.CharField(unique=True, max_length=512)),
                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
                ('notification_configuration', jsonfield.fields.JSONField(default=dict)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('modified_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('organization', models.ForeignKey(related_name='notification_templates', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
                ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
            ],
        ),
        migrations.AddField(
            model_name='notification',
            name='notification_template',
            field=models.ForeignKey(related_name='notifications', editable=False, to='main.NotificationTemplate'),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='notification',
            field=models.ManyToManyField(to='main.Notification', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='notification_template',
            field=models.ManyToManyField(to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='organization',
            name='notification_templates_any',
            field=models.ManyToManyField(related_name='organization_notification_templates_for_any', to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='organization',
            name='notification_templates_error',
            field=models.ManyToManyField(related_name='organization_notification_templates_for_errors', to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='organization',
            name='notification_templates_success',
            field=models.ManyToManyField(related_name='organization_notification_templates_for_success', to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='unifiedjob',
            name='notifications',
            field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'),
        ),
        migrations.AddField(
            model_name='unifiedjobtemplate',
            name='notification_templates_any',
            field=models.ManyToManyField(related_name='unifiedjobtemplate_notification_templates_for_any', to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='unifiedjobtemplate',
            name='notification_templates_error',
            field=models.ManyToManyField(related_name='unifiedjobtemplate_notification_templates_for_errors', to='main.NotificationTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='unifiedjobtemplate',
            name='notification_templates_success',
            field=models.ManyToManyField(related_name='unifiedjobtemplate_notification_templates_for_success', to='main.NotificationTemplate', blank=True),
        ),
    ]
29
awx/main/migrations/0004_v300_fact_changes.py
Normal file
@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import jsonbfield.fields


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0003_v300_notification_changes'),
    ]

    operations = [
        migrations.CreateModel(
            name='Fact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)),
                ('module', models.CharField(max_length=128)),
                ('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)),
                ('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')),
            ],
        ),
        migrations.AlterIndexTogether(
            name='fact',
            index_together=set([('timestamp', 'module', 'host')]),
        ),
    ]
15
awx/main/migrations/0005_v300_migrate_facts.py
Normal file
@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _system_tracking as system_tracking
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0004_v300_fact_changes'),
    ]

    operations = [
        migrations.RunPython(system_tracking.migrate_facts),
    ]
16
awx/main/migrations/0006_v300_active_flag_cleanup.py
Normal file
@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _cleanup_deleted as cleanup_deleted
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0005_v300_migrate_facts'),
    ]

    operations = [
        migrations.RunPython(cleanup_deleted.cleanup_deleted),
    ]
58
awx/main/migrations/0007_v300_active_flag_removal.py
Normal file
@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0006_v300_active_flag_cleanup'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='credential',
            name='active',
        ),
        migrations.RemoveField(
            model_name='custominventoryscript',
            name='active',
        ),
        migrations.RemoveField(
            model_name='group',
            name='active',
        ),
        migrations.RemoveField(
            model_name='host',
            name='active',
        ),
        migrations.RemoveField(
            model_name='inventory',
            name='active',
        ),
        migrations.RemoveField(
            model_name='organization',
            name='active',
        ),
        migrations.RemoveField(
            model_name='permission',
            name='active',
        ),
        migrations.RemoveField(
            model_name='schedule',
            name='active',
        ),
        migrations.RemoveField(
            model_name='team',
            name='active',
        ),
        migrations.RemoveField(
            model_name='unifiedjob',
            name='active',
        ),
        migrations.RemoveField(
            model_name='unifiedjobtemplate',
            name='active',
        ),
    ]
264
awx/main/migrations/0008_v300_rbac_changes.py
Normal file
@ -0,0 +1,264 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0007_v300_active_flag_removal'),
    ]

    operations = [
        #
        # Patch up existing
        #
        migrations.RenameField(
            'Organization',
            'admins',
            'deprecated_admins',
        ),
        migrations.RenameField(
            'Organization',
            'users',
            'deprecated_users',
        ),
        migrations.RenameField(
            'Team',
            'users',
            'deprecated_users',
        ),
        migrations.RenameField(
            'Team',
            'projects',
            'deprecated_projects',
        ),
        migrations.AddField(
            model_name='project',
            name='organization',
            field=models.ForeignKey(related_name='projects', to='main.Organization', blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='team',
            name='deprecated_projects',
            field=models.ManyToManyField(related_name='deprecated_teams', to='main.Project', blank=True),
        ),
        migrations.RenameField(
            model_name='organization',
            old_name='projects',
            new_name='deprecated_projects',
        ),
        migrations.AlterField(
            model_name='organization',
            name='deprecated_projects',
            field=models.ManyToManyField(related_name='deprecated_organizations', to='main.Project', blank=True),
        ),
        migrations.RenameField(
            'Credential',
            'team',
            'deprecated_team',
        ),
        migrations.RenameField(
            'Credential',
            'user',
            'deprecated_user',
        ),
        migrations.AlterField(
            model_name='organization',
            name='deprecated_admins',
            field=models.ManyToManyField(related_name='deprecated_admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True),
        ),
        migrations.AlterField(
            model_name='organization',
            name='deprecated_users',
            field=models.ManyToManyField(related_name='deprecated_organizations', to=settings.AUTH_USER_MODEL, blank=True),
        ),
        migrations.AlterField(
            model_name='team',
            name='deprecated_users',
            field=models.ManyToManyField(related_name='deprecated_teams', to=settings.AUTH_USER_MODEL, blank=True),
        ),
        migrations.AlterUniqueTogether(
            name='credential',
            unique_together=set([]),
        ),
        migrations.AddField(
            model_name='credential',
            name='organization',
            field=models.ForeignKey(related_name='credentials', default=None, blank=True, to='main.Organization', null=True),
        ),

        #
        # New RBAC models and fields
        #
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('role_field', models.TextField()),
                ('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)),
                ('members', models.ManyToManyField(related_name='roles', to=settings.AUTH_USER_MODEL)),
                ('parents', models.ManyToManyField(related_name='children', to='main.Role')),
                ('implicit_parents', models.TextField(default=b'[]')),
                ('content_type', models.ForeignKey(default=None, to='contenttypes.ContentType', null=True)),
                ('object_id', models.PositiveIntegerField(default=None, null=True)),
            ],
            options={
                'db_table': 'main_rbac_roles',
                'verbose_name_plural': 'roles',
            },
        ),
        migrations.CreateModel(
            name='RoleAncestorEntry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('role_field', models.TextField()),
                ('content_type_id', models.PositiveIntegerField()),
                ('object_id', models.PositiveIntegerField()),
                ('ancestor', models.ForeignKey(related_name='+', to='main.Role')),
                ('descendent', models.ForeignKey(related_name='+', to='main.Role')),
            ],
            options={
                'db_table': 'main_rbac_role_ancestors',
                'verbose_name_plural': 'role_ancestors',
            },
        ),
        migrations.AddField(
            model_name='role',
            name='ancestors',
            field=models.ManyToManyField(related_name='descendents', through='main.RoleAncestorEntry', to='main.Role'),
        ),
        migrations.AlterIndexTogether(
            name='role',
            index_together=set([('content_type', 'object_id')]),
        ),
        migrations.AlterIndexTogether(
            name='roleancestorentry',
            index_together=set([('ancestor', 'content_type_id', 'object_id'), ('ancestor', 'content_type_id', 'role_field'), ('ancestor', 'descendent')]),
        ),
        migrations.AddField(
            model_name='credential',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='credential',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='credential',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='custominventoryscript',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='custominventoryscript',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'organization.member_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='adhoc_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='update_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'adhoc_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='inventory',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'update_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.admin_role', b'inventory.organization.admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.auditor_role', b'inventory.organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='organization',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='organization',
            name='auditor_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_auditor', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='organization',
            name='member_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='organization',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'member_role', b'auditor_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='project',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'singleton:system_administrator'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='project',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='project',
            name='update_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='project',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'singleton:system_auditor', b'use_role', b'update_role'], to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='team',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='team',
            name='member_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null=b'True'),
        ),
        migrations.AddField(
            model_name='team',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role', b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
        ),
    ]
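Note: the parent_role values above form a small path language: a bare name ('admin_role') references a sibling role on the same object, a dotted path ('organization.admin_role') walks foreign keys to a related object's role, and 'singleton:...' names a global role. A rough sketch of resolving such a path, assuming a plain attribute walk; the actual resolution is implemented in awx.main.fields.ImplicitRoleField and may differ:

def resolve_parent_role(instance, path):
    # Sketch only: mirrors the parent_role syntax used above.
    if path.startswith('singleton:'):
        return ('singleton', path.split(':', 1)[1])  # e.g. system_administrator
    obj = instance
    for attr in path.split('.'):  # 'organization.admin_role' -> two hops
        obj = getattr(obj, attr, None)
        if obj is None:  # nullable FK somewhere along the path
            return None
    return obj  # a main.Role instance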
26
awx/main/migrations/0009_v300_rbac_migrations.py
Normal file
@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_v300_rbac_changes'),
    ]

    operations = [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(rbac.migrate_users),
        migrations.RunPython(rbac.create_roles),
        migrations.RunPython(rbac.migrate_organization),
        migrations.RunPython(rbac.migrate_team),
        migrations.RunPython(rbac.migrate_inventory),
        migrations.RunPython(rbac.migrate_projects),
        migrations.RunPython(rbac.migrate_credential),
        migrations.RunPython(rbac.migrate_job_templates),
        migrations.RunPython(rbac.rebuild_role_hierarchy),
    ]
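Note: each RunPython step above receives the historical app registry, so the rbac helper functions must fetch models via apps.get_model() rather than importing them. A minimal, self-contained sketch of that pattern (model, app label, and loop body illustrative):

from django.db import migrations

def forwards(apps, schema_editor):
    # Historical model: matches the schema at this point in the migration
    # graph, not the current models.py.
    Organization = apps.get_model('main', 'Organization')
    for org in Organization.objects.iterator():
        org.save()  # per-row fixup would go here

class Migration(migrations.Migration):
    dependencies = [('main', '0008_v300_rbac_changes')]
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]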
114
awx/main/migrations/0010_v300_create_system_job_templates.py
Normal file
@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.utils.timezone import now

from awx.api.license import feature_enabled


def create_system_job_templates(apps, schema_editor):
    '''
    Create default system job templates if not present. Create default schedules
    only if new system job templates were created (i.e. new database).
    '''

    SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
    Schedule = apps.get_model('main', 'Schedule')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    sjt_ct = ContentType.objects.get_for_model(SystemJobTemplate)
    now_dt = now()
    now_str = now_dt.strftime('%Y%m%dT%H%M%SZ')

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_jobs',
        defaults=dict(
            name='Cleanup Job Details',
            description='Remove job history',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created:
        sched = Schedule(
            name='Cleanup Job Schedule',
            rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'days': '120'},
            created=now_dt,
            modified=now_dt,
        )
        sched.unified_job_template = sjt
        sched.save()

    existing_cd_jobs = SystemJobTemplate.objects.filter(job_type='cleanup_deleted')
    Schedule.objects.filter(unified_job_template__in=existing_cd_jobs).delete()
    existing_cd_jobs.delete()

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_activitystream',
        defaults=dict(
            name='Cleanup Activity Stream',
            description='Remove activity stream history',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created:
        sched = Schedule(
            name='Cleanup Activity Schedule',
            rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=TU' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'days': '355'},
            created=now_dt,
            modified=now_dt,
        )
        sched.unified_job_template = sjt
        sched.save()

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_facts',
        defaults=dict(
            name='Cleanup Fact Details',
            description='Remove system tracking history',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created and feature_enabled('system_tracking', bypass_database=True):
        sched = Schedule(
            name='Cleanup Fact Schedule',
            rrule='DTSTART:%s RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=1' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'older_than': '120d', 'granularity': '1w'},
            created=now_dt,
            modified=now_dt,
        )
        sched.unified_job_template = sjt
        sched.save()


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0009_v300_rbac_migrations'),
    ]

    operations = [
        migrations.RunPython(create_system_job_templates, migrations.RunPython.noop),
        migrations.AlterField(
            model_name='systemjob',
            name='job_type',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
        ),
        migrations.AlterField(
            model_name='systemjobtemplate',
            name='job_type',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
        ),
    ]
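Note: the DTSTART/RRULE strings assembled by create_system_job_templates() are plain iCalendar recurrence rules. With an illustrative timestamp:

from datetime import datetime

now_str = datetime(2016, 6, 1, 12, 0, 0).strftime('%Y%m%dT%H%M%SZ')
rrule = 'DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str
# -> 'DTSTART:20160601T120000Z RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU'
# i.e. repeat weekly on Sunday, anchored at the (UTC) creation time.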
19
awx/main/migrations/0011_v300_credential_domain_field.py
Normal file
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0010_v300_create_system_job_templates'),
    ]

    operations = [
        migrations.AddField(
            model_name='credential',
            name='domain',
            field=models.CharField(default=b'', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True),
        ),
    ]
55
awx/main/migrations/0012_v300_create_labels.py
Normal file
@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0011_v300_credential_domain_field'),
    ]

    operations = [
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('description', models.TextField(default=b'', blank=True)),
                ('name', models.CharField(max_length=512)),
                ('created_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('modified_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                ('organization', models.ForeignKey(related_name='labels', to='main.Organization', help_text='Organization this label belongs to.')),
                ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
            ],
            options={
                'ordering': ('organization', 'name'),
            },
        ),
        migrations.AddField(
            model_name='activitystream',
            name='label',
            field=models.ManyToManyField(to='main.Label', blank=True),
        ),
        migrations.AddField(
            model_name='job',
            name='labels',
            field=models.ManyToManyField(related_name='job_labels', to='main.Label', blank=True),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='labels',
            field=models.ManyToManyField(related_name='jobtemplate_labels', to='main.Label', blank=True),
        ),
        migrations.AlterUniqueTogether(
            name='label',
            unique_together=set([('name', 'organization')]),
        ),
    ]
21
awx/main/migrations/0013_v300_label_changes.py
Normal file
@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0012_v300_create_labels'),
    ]

    operations = [
        migrations.AlterField(
            model_name='label',
            name='organization',
            field=models.ForeignKey(related_name='labels', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Organization', help_text='Organization this label belongs to.', null=True),
        ),
    ]
66
awx/main/migrations/0014_v300_invsource_cred.py
Normal file
@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0013_v300_label_changes'),
    ]

    operations = [
        migrations.AddField(
            model_name='job',
            name='network_credential',
            field=models.ForeignKey(related_name='jobs_as_network_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='network_credential',
            field=models.ForeignKey(related_name='jobtemplates_as_network_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
        ),
        migrations.AddField(
            model_name='credential',
            name='authorize',
            field=models.BooleanField(default=False, help_text='Whether to use the authorize mechanism.'),
        ),
        migrations.AddField(
            model_name='credential',
            name='authorize_password',
            field=models.CharField(default=b'', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True),
        ),
        migrations.AlterField(
            model_name='credential',
            name='deprecated_team',
            field=models.ForeignKey(related_name='deprecated_credentials', default=None, blank=True, to='main.Team', null=True),
        ),
        migrations.AlterField(
            model_name='credential',
            name='deprecated_user',
            field=models.ForeignKey(related_name='deprecated_credentials', default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
        ),
        migrations.AlterField(
            model_name='credential',
            name='kind',
            field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]),
        ),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
        migrations.AlterField(
            model_name='team',
            name='deprecated_projects',
            field=models.ManyToManyField(related_name='deprecated_teams', to='main.Project', blank=True),
        ),
    ]
19
awx/main/migrations/0015_v300_label_changes.py
Normal file
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0014_v300_invsource_cred'),
    ]

    operations = [
        migrations.AlterField(
            model_name='label',
            name='organization',
            field=models.ForeignKey(related_name='labels', to='main.Organization', help_text='Organization this label belongs to.'),
        ),
    ]
51
awx/main/migrations/0016_v300_prompting_changes.py
Normal file
@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0015_v300_label_changes'),
    ]

    operations = [
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_limit_on_launch',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_inventory_on_launch',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_credential_on_launch',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_job_type_on_launch',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_tags_on_launch',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='job',
            name='inventory',
            field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
        ),
        migrations.AlterField(
            model_name='jobtemplate',
            name='inventory',
            field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
        ),
    ]
20
awx/main/migrations/0017_v300_prompting_migrations.py
Normal file
@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _ask_for_variables as ask_for_variables
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0016_v300_prompting_changes'),
    ]

    operations = [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(ask_for_variables.migrate_credential),
        migrations.RunPython(rbac.rebuild_role_hierarchy),
    ]
18
awx/main/migrations/0018_v300_host_ordering.py
Normal file
@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0017_v300_prompting_migrations'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='host',
            options={'ordering': ('name',)},
        ),
    ]
55
awx/main/migrations/0019_v300_new_azure_credential.py
Normal file
@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0018_v300_host_ordering'),
    ]

    operations = [
        migrations.AddField(
            model_name='credential',
            name='client',
            field=models.CharField(default=b'', help_text='Client Id or Application Id for the credential', max_length=128, blank=True),
        ),
        migrations.AddField(
            model_name='credential',
            name='secret',
            field=models.CharField(default=b'', help_text='Secret Token for this credential', max_length=1024, blank=True),
        ),
        migrations.AddField(
            model_name='credential',
            name='subscription',
            field=models.CharField(default=b'', help_text='Subscription identifier for this credential', max_length=1024, blank=True),
        ),
        migrations.AddField(
            model_name='credential',
            name='tenant',
            field=models.CharField(default=b'', help_text='Tenant identifier for this credential', max_length=1024, blank=True),
        ),
        migrations.AlterField(
            model_name='credential',
            name='kind',
            field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
        ),
        migrations.AlterField(
            model_name='host',
            name='instance_id',
            field=models.CharField(default=b'', max_length=1024, blank=True),
        ),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
    ]
32
awx/main/migrations/0020_v300_labels_changes.py
Normal file
@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0019_v300_new_azure_credential'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='job',
            name='labels',
        ),
        migrations.RemoveField(
            model_name='jobtemplate',
            name='labels',
        ),
        migrations.AddField(
            model_name='unifiedjob',
            name='labels',
            field=models.ManyToManyField(related_name='unifiedjob_labels', to='main.Label', blank=True),
        ),
        migrations.AddField(
            model_name='unifiedjobtemplate',
            name='labels',
            field=models.ManyToManyField(related_name='unifiedjobtemplate_labels', to='main.Label', blank=True),
        ),
    ]
19
awx/main/migrations/0021_v300_activity_stream.py
Normal file
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0020_v300_labels_changes'),
    ]

    operations = [
        migrations.AddField(
            model_name='activitystream',
            name='role',
            field=models.ManyToManyField(to='main.Role', blank=True),
        ),
    ]
34
awx/main/migrations/0022_v300_adhoc_extravars.py
Normal file
@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0021_v300_activity_stream'),
    ]

    operations = [
        migrations.AddField(
            model_name='adhoccommand',
            name='extra_vars',
            field=models.TextField(default=b'', blank=True),
        ),
        migrations.AlterField(
            model_name='credential',
            name='kind',
            field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
        ),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
        ),
    ]
18
awx/main/migrations/0023_v300_activity_stream_ordering.py
Normal file
@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0022_v300_adhoc_extravars'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='activitystream',
            options={'ordering': ('pk',)},
        ),
    ]
19
awx/main/migrations/0024_v300_jobtemplate_allow_simul.py
Normal file
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0023_v300_activity_stream_ordering'),
    ]

    operations = [
        migrations.AddField(
            model_name='jobtemplate',
            name='allow_simultaneous',
            field=models.BooleanField(default=False),
        ),
    ]
30
awx/main/migrations/0025_v300_update_rbac_parents.py
Normal file
@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0024_v300_jobtemplate_allow_simul'),
    ]

    operations = [
        migrations.AlterField(
            model_name='credential',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AlterField(
            model_name='team',
            name='member_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
        ),
        migrations.AlterField(
            model_name='team',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
        ),
    ]
27
awx/main/migrations/0026_v300_credential_unique.py
Normal file
@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0025_v300_update_rbac_parents'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='credential',
            unique_together=set([('organization', 'name', 'kind')]),
        ),
        migrations.AlterField(
            model_name='credential',
            name='read_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(rbac.rebuild_role_hierarchy),
    ]
20
awx/main/migrations/0027_v300_team_migrations.py
Normal file
@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _team_cleanup as team_cleanup
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0026_v300_credential_unique'),
    ]

    operations = [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(team_cleanup.migrate_team),
        migrations.RunPython(rbac.rebuild_role_hierarchy),
    ]
21
awx/main/migrations/0028_v300_org_team_cascade.py
Normal file
@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0027_v300_team_migrations'),
    ]

    operations = [
        migrations.AlterField(
            model_name='team',
            name='organization',
            field=models.ForeignKey(related_name='teams', to='main.Organization'),
            preserve_default=False,
        ),
    ]
@ -1,2 +1,2 @@
# Copyright (c) 2015 Ansible, Inc.
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
9
awx/main/migrations/_ask_for_variables.py
Normal file
@ -0,0 +1,9 @@
def migrate_credential(apps, schema_editor):
    '''If a job template has no credential, set ask_credential_on_launch
    to True; otherwise leave it at the default of False.
    '''
    JobTemplate = apps.get_model('main', 'JobTemplate')
    for jt in JobTemplate.objects.iterator():
        if jt.credential is None:
            jt.ask_credential_on_launch = True
            jt.save()
85
awx/main/migrations/_cleanup_deleted.py
Normal file
@ -0,0 +1,85 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

# Python
import logging

# Django
from django.db import transaction
from django.utils.dateparse import parse_datetime


def cleanup_deleted(apps, schema_editor):
    logger = logging.getLogger('awx.main.migrations.cleanup_deleted')

    def cleanup_model(model):
        '''
        Presume the '_deleted_' string to be in the 'name' field unless considering the User model.
        When considering the User model, presume the '_d_' string to be in the 'username' field.
        '''
        logger.debug('cleaning up model %s', model)

        name_field = 'name'
        name_prefix = '_deleted_'
        active_field = None
        n_deleted_items = 0
        for field in model._meta.fields:
            if field.name in ('is_active', 'active'):
                active_field = field.name
            if field.name == 'is_active':  # is User model
                name_field = 'username'
                name_prefix = '_d_'
        if not active_field:
            logger.warning('skipping model %s, no active field', model)
            return n_deleted_items
        qs = model.objects.filter(**{
            active_field: False,
            '%s__startswith' % name_field: name_prefix,
        })
        pks_to_delete = set()
        for instance in qs.iterator():
            dt = parse_datetime(getattr(instance, name_field).split('_')[2])
            if not dt:
                logger.warning('unable to find deleted timestamp in %s field', name_field)
            else:
                logger.info('deleting %s', instance)
                n_deleted_items += 1
                pks_to_delete.add(instance.pk)

            # Clean up objects in batches instead of deleting each one individually.
            if len(pks_to_delete) >= 50:
                model.objects.filter(pk__in=pks_to_delete).delete()
                pks_to_delete.clear()
        if len(pks_to_delete):
            model.objects.filter(pk__in=pks_to_delete).delete()
        return n_deleted_items

    logger = logging.getLogger('awx.main.commands.cleanup_deleted')
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(message)s'))
    logger.addHandler(handler)
    logger.propagate = False

    with transaction.atomic():
        n_deleted_items = 0

        models = [
            apps.get_model('auth', 'User'),
            apps.get_model('main', 'Credential'),
            apps.get_model('main', 'CustomInventoryScript'),
            apps.get_model('main', 'Group'),
            apps.get_model('main', 'Host'),
            apps.get_model('main', 'Inventory'),
            apps.get_model('main', 'NotificationTemplate'),
            apps.get_model('main', 'Organization'),
            apps.get_model('main', 'Permission'),
            apps.get_model('main', 'Schedule'),
            apps.get_model('main', 'Team'),
            apps.get_model('main', 'UnifiedJob'),
            apps.get_model('main', 'UnifiedJobTemplate'),
        ]

        for model in models:
            n_deleted_items += cleanup_model(model)
        logger.log(99, "Removed %d items", n_deleted_items)
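Note: cleanup_model() depends on the soft-delete naming convention, where the deletion timestamp is the third underscore-separated token of the name. A quick illustration with a made-up name:

from django.utils.dateparse import parse_datetime

name = '_deleted_2016-01-15T10:30:00Z_demo-inventory'  # hypothetical example
dt = parse_datetime(name.split('_')[2])
# dt -> datetime.datetime(2016, 1, 15, 10, 30, tzinfo=utc)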
11
awx/main/migrations/_migration_utils.py
Normal file
@ -0,0 +1,11 @@
from awx.main.utils import set_current_apps


def set_current_apps_for_migrations(apps, schema_editor):
    '''
    This is necessary for migrations which do explicit saves on any model that
    has an ImplicitRoleField (which generally means anything that has
    some RBAC bindings associated with it). This sets the current 'apps' that
    the ImplicitRoleField should be using when creating new roles.
    '''
    set_current_apps(apps)
Some files were not shown because too many files have changed in this diff.