mirror of https://github.com/ansible/awx.git
synced 2026-02-04 19:18:13 -03:30

Compare commits

271 Commits
Commit SHAs in this comparison:

b9b2affe44 f61b6f9615 3b259de200 63e3e733e0 844b0f86b8 d4c3c089df 1328fb80a0 1fbcd1b10b
11b26c199b 463c4c1f7e 76a16b329e 123f646cea d99c9c8dce 4f3a8ef766 c114243082 229e997e7e
dc7ec9dfe0 07aae8cefc 902fb83493 1ef2d4cdad a6b362e455 2c3549331c 016fc7f6bf e8eda28ce5
83c232eb20 c30639c4e6 5e84782b9c d134291097 4b669fb16d b53621e74c 925c6543c4 bb5312f4fc
7333e55748 5e20dcb6ca cab6b8b333 46020379aa e23fb31a4a 17c95f200a 7676ccdbac 4626aa0144
fb7596929f d63518d789 6f1cbac324 2b80f0f7b6 10945faba1 d4ccb00338 d10d5f1539 3e5f328b52
d558ffd699 b64d401e74 0a3f131adc 6f9cf6a649 5db43b8283 aa9e60c508 2162e8e0cc 1eeffe4ae2
2927803a82 1b50b26901 44819987f7 9bf0d052ab 5c98d04e09 6560ab0fab efb7a729c7 6e1deed79e
ad3721bdb2 ca64630740 8686575311 0ecd6542bf 5e3d47683d 73f617d811 ea7e15bfc4 eca530c788
c1b48e2c9c 9d501327fc 31b3bad658 155c214df0 5931c13b04 5d7b7d5888 53ad819d65 3ce3786303
46bc146e26 88eaf1154a 5421c243d7 5cdab1b57a 2a86c5b944 daeeaf413a 2d119f7b02 68950d56ca
477c5df022 4c8f4f4cc5 6726e203b9 9a10811366 62bffaa7e6 14423c4f3f 8037cddfe5 be4b3c75b4
818b261bea 4707dc2a05 ebc2b821be 85a875bbfe a9663c2900 05c24df9e3 1becd4c39d 9817ab14d0
51b51a9bf7 55c5dd06cf 0e5e23372d 1e44d5c833 f7bc8fb662 416dcc83c9 13ed656506 f683f87ce3
c15cbe0f6e a8728670e1 cf9dffbaf8 3d1b32c72f e95da84e5a fcd759fa1f 6772c81927 774ec40989
b7ba280da3 058e2c0d81 072919040b 91ae343e3b 5afabc7a19 4788f0814f c528ece5df a1c03cd6a1
42fbb81337 5286e24721 0bde309d23 b2442d42a3 18409f89c5 88d5fb0420 1cc0f81913 8cb8e63db5
8597670299 c0ff4dad59 9b2ca04118 d98c60519e 589531163a 5dd8c3ace2 d021c253aa c48c8c04f4
a2102c92ec 99288a5e18 44c48d1d66 c785c38748 ebe0ded9c2 2dadfbcc14 3a58a5b772 5010e98b8f
3ef4cc9bfa c01c671642 a86e270905 4058d18593 caa55f112f d0af952685 fbc7f496c5 bb19a4234e
11f7e90f6a 5c080678a6 2df51a923d 0da0a8e67b b75ba7ebea 24de951f6c 974306541e d2fa5cc182
e45e4b3cda 65641c7edd 5f01c3f5a8 7b39198f26 f1e3be5ec8 bf5657a06a f583dd73e8 57b8aa4892
474876872e 3eaed52b83 28822d891c 37dbfa88f9 b6c30e8ef5 d938c96a76 4ce18618cb 6c7f11395b
134950ade1 7258a43bad 27f98163ff 6d04bd34ce 584ec9cf75 aebeeb170e c434d38876 ae3ab89515
0c250cd6af 33c1416f6c 3d7fcb3835 04da4503db 2016798e0f 39d119534c d273472927 5aa99b2ca1
96b9bd6ab6 2c5bdf3611 af4234556e c6482137d1 f223df303f f132ce9b64 f22fd58392 cccc038600
b9607dd415 7b32262f75 d69f6acf64 66a859872e ef3aab1357 62ebf85b96 0c074e0988 32c705a62a
7194338653 d43521bb77 b1710f9523 3b456d3e72 12a04a6da6 99205fde16 8539eae114 3e2dd4f86b
32c14d6eab 4f9901db38 a77c981e0c 77d2364022 d1b42fd583 2dfb0abb69 7bcbaabd71 9c20e1b494
2b5210842d 0b3e51458d d57fc998d5 1079051b12 4641056829 db2bb19d65 e5ad2e44fb aa8cda0001
949f383564 479ad13630 23c2e1be31 7628ef01f1 c0730aa562 67d6a9f9ea f9854abfa1 2697615dbf
a131250dc1 2d237b6dbb c8b15005b4 a5c4350695 9f18f8dbdb a8e1c8960f 7f66053654 1d6c88b7e2
049f85f3c9 4858868428 4e37076955 e6f654b568 65e110cdbf 10e99c76a8 b0e3bc96dd 59df54b363
5950f26c69 a3a5c6bf9f ca16787e7c 271bd10b47 c2660af60d d5c6c589b2 fc0a039097
.gitignore (vendored, 1 change)

@@ -62,6 +62,7 @@ __pycache__
 # UI build flag files
 awx/ui/.deps_built
 awx/ui/.release_built
+awx/ui/.release_deps_built

 # Testing
 .cache
@@ -18,7 +18,7 @@ $ pip install --upgrade ansible-tower-cli

 The AWX host URL, user, and password must be set for the AWX instance to be exported:
 ```
-$ tower-cli config host <old-awx-host.example.com>
+$ tower-cli config host http://<old-awx-host.example.com>
 $ tower-cli config username <user>
 $ tower-cli config password <pass>
 ```

@@ -62,7 +62,7 @@ For other install methods, refer to the [Install.md](https://github.com/ansible/
 Configure tower-cli for your new AWX host as shown earlier. Import from a JSON file named assets.json

 ```
-$ tower-cli config host <new-awx-host.example.com>
+$ tower-cli config host http://<new-awx-host.example.com>
 $ tower-cli config username <user>
 $ tower-cli config password <pass>
 $ tower-cli send assets.json
Makefile (78 changes)

@@ -1,4 +1,4 @@
-PYTHON ?= python
+PYTHON ?= python3
 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
 SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no

@@ -53,6 +53,7 @@ WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl

 # UI flag files
 UI_DEPS_FLAG_FILE = awx/ui/.deps_built
+UI_RELEASE_DEPS_FLAG_FILE = awx/ui/.release_deps_built
 UI_RELEASE_FLAG_FILE = awx/ui/.release_built

 I18N_FLAG_FILE = .i18n_built

@@ -73,6 +74,7 @@ clean-ui:
 	rm -rf awx/ui/test/e2e/reports/
 	rm -rf awx/ui/client/languages/
 	rm -f $(UI_DEPS_FLAG_FILE)
+	rm -f $(UI_RELEASE_DEPS_FLAG_FILE)
 	rm -f $(UI_RELEASE_FLAG_FILE)

 clean-tmp:

@@ -120,23 +122,30 @@ virtualenv_ansible:
 	mkdir $(VENV_BASE); \
 	fi; \
 	if [ ! -d "$(VENV_BASE)/ansible" ]; then \
-	virtualenv --system-site-packages $(VENV_BASE)/ansible && \
+	virtualenv -p python --system-site-packages $(VENV_BASE)/ansible && \
 	$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
 	$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
 	$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
 	fi; \
 	fi

+virtualenv_ansible_py3:
+	if [ "$(VENV_BASE)" ]; then \
+	if [ ! -d "$(VENV_BASE)" ]; then \
+	mkdir $(VENV_BASE); \
+	fi; \
+	if [ ! -d "$(VENV_BASE)/ansible3" ]; then \
+	python3 -m venv --system-site-packages $(VENV_BASE)/ansible3; \
+	fi; \
+	fi
+
 virtualenv_awx:
 	if [ "$(VENV_BASE)" ]; then \
 	if [ ! -d "$(VENV_BASE)" ]; then \
 	mkdir $(VENV_BASE); \
 	fi; \
 	if [ ! -d "$(VENV_BASE)/awx" ]; then \
-	virtualenv --system-site-packages $(VENV_BASE)/awx && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
+	$(PYTHON) -m venv $(VENV_BASE)/awx; \
 	fi; \
 	fi

@@ -148,6 +157,11 @@ requirements_ansible: virtualenv_ansible
 	fi
 	$(VENV_BASE)/ansible/bin/pip uninstall --yes -r requirements/requirements_ansible_uninstall.txt

+requirements_ansible_py3: virtualenv_ansible_py3
+	cat requirements/requirements_ansible.txt requirements/requirements_ansible_git.txt | $(VENV_BASE)/ansible3/bin/pip3 install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin
+	$(VENV_BASE)/ansible3/bin/pip3 install ansible # can't inherit from system ansible, it's py2
+	$(VENV_BASE)/ansible3/bin/pip3 uninstall --yes -r requirements/requirements_ansible_uninstall.txt
+
 requirements_ansible_dev:
 	if [ "$(VENV_BASE)" ]; then \
 	$(VENV_BASE)/ansible/bin/pip install pytest mock; \

@@ -155,11 +169,9 @@ requirements_ansible_dev:

 requirements_isolated:
 	if [ ! -d "$(VENV_BASE)/awx" ]; then \
-	virtualenv --system-site-packages $(VENV_BASE)/awx && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==35.0.2 && \
-	$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
+	$(PYTHON) -m venv $(VENV_BASE)/awx; \
 	fi;
+	echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
 	$(VENV_BASE)/awx/bin/pip install -r requirements/requirements_isolated.txt

 # Install third-party requirements needed for AWX's environment.

@@ -169,6 +181,7 @@ requirements_awx: virtualenv_awx
 	else \
 	cat requirements/requirements.txt requirements/requirements_git.txt | $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
 	fi
+	echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
 	#$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt

 requirements_awx_dev:

@@ -176,7 +189,7 @@ requirements_awx_dev:

 requirements: requirements_ansible requirements_awx

-requirements_dev: requirements requirements_awx_dev requirements_ansible_dev
+requirements_dev: requirements requirements_ansible_py3 requirements_awx_dev requirements_ansible_dev

 requirements_test: requirements

@@ -195,7 +208,7 @@ version_file:
 	if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \
+	python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

 # Do any one-time init tasks.
 comma := ,

@@ -259,7 +272,7 @@ supervisor:
 	@if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	supervisord --configuration /supervisor.conf --pidfile=/tmp/supervisor_pid
+	supervisord --pidfile=/tmp/supervisor_pid

 # Alternate approach to tmux to run all development tasks specified in
 # Procfile.

@@ -356,7 +369,7 @@ check: flake8 pep8 # pyflakes pylint
 awx-link:
 	cp -R /tmp/awx.egg-info /awx_devel/ || true
 	sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
-	cp -f /tmp/awx.egg-link /venv/awx/lib/python2.7/site-packages/awx.egg-link
+	cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link

 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -453,7 +466,7 @@ messages:
 # generate l10n .json .mo
 languages: $(I18N_FLAG_FILE)

-$(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
+$(I18N_FLAG_FILE): $(UI_RELEASE_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run languages
 	$(PYTHON) tools/scripts/compilemessages.py
 	touch $(I18N_FLAG_FILE)

@@ -461,13 +474,31 @@ $(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
 # End l10n TASKS
 # --------------------------------------

-# UI TASKS
+# UI RELEASE TASKS
 # --------------------------------------
+ui-release: $(UI_RELEASE_FLAG_FILE)
+
+$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_RELEASE_DEPS_FLAG_FILE)
+	$(NPM_BIN) --prefix awx/ui run build-release
+	touch $(UI_RELEASE_FLAG_FILE)
+
+$(UI_RELEASE_DEPS_FLAG_FILE):
+	PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
+	touch $(UI_RELEASE_DEPS_FLAG_FILE)
+
+# END UI RELEASE TASKS
+# --------------------------------------
+
+# UI TASKS
 # --------------------------------------
 ui-deps: $(UI_DEPS_FLAG_FILE)

 $(UI_DEPS_FLAG_FILE):
-	$(NPM_BIN) --unsafe-perm --prefix awx/ui install --no-save awx/ui
+	@if [ -f ${UI_RELEASE_DEPS_FLAG_FILE} ]; then \
+	rm -rf awx/ui/node_modules; \
+	rm -f ${UI_RELEASE_DEPS_FLAG_FILE}; \
+	fi; \
+	$(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
 	touch $(UI_DEPS_FLAG_FILE)

 ui-docker-machine: $(UI_DEPS_FLAG_FILE)

@@ -481,12 +512,6 @@ ui-docker: $(UI_DEPS_FLAG_FILE)
 ui-devel: $(UI_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run build-devel -- $(MAKEFLAGS)

-ui-release: $(UI_RELEASE_FLAG_FILE)
-
-$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run build-release
-	touch $(UI_RELEASE_FLAG_FILE)
-
 ui-test: $(UI_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run test

@@ -497,9 +522,6 @@ ui-test-ci: $(UI_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run test:ci
 	$(NPM_BIN) --prefix awx/ui run unit

-testjs_ci:
-	echo "Update UI unittests later" #ui-test-ci
-
 jshint: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) run --prefix awx/ui jshint
+	$(NPM_BIN) run --prefix awx/ui lint

@@ -547,7 +569,7 @@ docker-isolated:
 	TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
 	docker start tools_awx_1
 	docker start tools_isolated_1
-	echo "__version__ = '`git describe --long | cut -d - -f 1-1`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python2.7/site-packages/awx.py"
+	echo "__version__ = '`git describe --long | cut -d - -f 1-1`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.py"
 	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up

 # Docker Compose Development environment

@@ -574,7 +596,7 @@ docker-compose-detect-schema-change:
 	$(MAKE) docker-compose-genschema
 	curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
 	# Ignore differences in whitespace with -b
-	diff -u -b schema.json reference-schema.json
+	diff -u -b reference-schema.json schema.json

 docker-compose-clean:
 	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
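Note on the virtualenv changes above: the old targets bootstrapped environments with the third-party `virtualenv` tool plus pinned pip/setuptools installs, while the new targets use the Python 3 stdlib `venv` module. A minimal sketch of what `python3 -m venv --system-site-packages $(VENV_BASE)/ansible3` does, driving the same machinery through the `venv` API (the target path is illustrative):

```python
import venv

# Equivalent of `python3 -m venv --system-site-packages <dir>`;
# with_pip=True matches the CLI default of bootstrapping pip via ensurepip.
builder = venv.EnvBuilder(system_site_packages=True, with_pip=True)
builder.create('/var/lib/awx/venv/ansible3')  # illustrative location
```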
@@ -91,16 +91,6 @@ def prepare_env():
     # Monkeypatch Django find_commands to also work with .pyc files.
     import django.core.management
     django.core.management.find_commands = find_commands
-    # Fixup sys.modules reference to django.utils.six to allow jsonfield to
-    # work when using Django 1.4.
-    import django.utils
-    try:
-        import django.utils.six
-    except ImportError:  # pragma: no cover
-        import six
-        sys.modules['django.utils.six'] = sys.modules['six']
-        django.utils.six = sys.modules['django.utils.six']
-    from django.utils import six  # noqa
     # Use the AWX_TEST_DATABASE_* environment variables to specify the test
     # database settings to use when management command is run as an external
     # program via unit tests.
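The deleted block dates from the Python 2 era of Django compatibility: it aliased the standalone `six` module into `django.utils.six` through `sys.modules` so that `import django.utils.six` would always resolve. For reference, the aliasing pattern in isolation (assumes `six` is installed):

```python
import sys
import six

# Make `import django.utils.six` resolve to the standalone six module,
# even on Django builds that do not bundle their own copy.
sys.modules.setdefault('django.utils.six', six)
```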
@@ -43,7 +43,7 @@ register(
     help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
                 '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
                 'of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
-                'authorization grants in the number of seconds.'),
+                'authorization codes in the number of seconds.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
@@ -65,7 +65,7 @@ class TypeFilterBackend(BaseFilterBackend):
             model = queryset.model
             model_type = get_type_for_model(model)
             if 'polymorphic_ctype' in get_all_field_names(model):
-                types_pks = set([v for k,v in types_map.items() if k in types])
+                types_pks = set([v for k, v in types_map.items() if k in types])
                 queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
             elif model_type in types:
                 queryset = queryset

@@ -192,7 +192,7 @@ class FieldLookupBackend(BaseFilterBackend):

     def value_to_python(self, model, lookup, value):
         try:
-            lookup = lookup.encode("ascii")
+            lookup.encode("ascii")
         except UnicodeEncodeError:
             raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)

@@ -363,12 +363,12 @@ class FieldLookupBackend(BaseFilterBackend):
             args.append(q)
         if search_filters and search_filter_relation == 'OR':
             q = Q()
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 for constrain in constrains:
                     q |= Q(**{constrain: term})
             args.append(q)
         elif search_filters and search_filter_relation == 'AND':
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 q_chain = Q()
                 for constrain in constrains:
                     q_chain |= Q(**{constrain: term})
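The `value_to_python` change is subtler than the `iteritems()` ones: on Python 2, `lookup.encode("ascii")` returned a `str`, so rebinding was harmless, but on Python 3 `encode()` returns `bytes` and the assignment would silently change the lookup's type. Calling `encode()` purely for its `UnicodeEncodeError` side effect keeps it a validity check:

```python
lookup = "created_by__username"
lookup.encode("ascii")          # raises UnicodeEncodeError for non-ASCII names
assert isinstance(lookup, str)  # the original str is left untouched

try:
    "créated_by".encode("ascii")
except UnicodeEncodeError:
    print("rejected non-ASCII field name")
```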
@@ -5,8 +5,7 @@
 import inspect
 import logging
 import time
-import six
-import urllib
+import urllib.parse

 # Django
 from django.conf import settings

@@ -32,9 +31,6 @@ from rest_framework.permissions import AllowAny
 from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

-# cryptography
-from cryptography.fernet import InvalidToken
-
 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import *  # noqa
@@ -90,9 +86,10 @@ class LoggedLoginView(auth_views.LoginView):
             logger.info(smart_text(u"User {} logged in.".format(self.request.user.username)))
             ret.set_cookie('userLoggedIn', 'true')
             current_user = UserSerializer(self.request.user)
-            current_user = JSONRenderer().render(current_user.data)
-            current_user = urllib.quote('%s' % current_user, '')
+            current_user = smart_text(JSONRenderer().render(current_user.data))
+            current_user = urllib.parse.quote('%s' % current_user, '')
             ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)
+
             return ret
         else:
             ret.status_code = 401
@@ -304,7 +301,7 @@ class APIView(views.APIView):
         # submitted data was rejected.
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method in ('POST', 'PUT', 'PATCH') and response_status in xrange(400, 500):
+        if request_method in ('POST', 'PUT', 'PATCH') and response_status in range(400, 500):
             return self.request.data.copy()

         return data
@@ -347,7 +344,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
         # form.
         if hasattr(self, '_raw_data_form_marker'):
             # Always remove read only fields from serializer.
-            for name, field in serializer.fields.items():
+            for name, field in list(serializer.fields.items()):
                 if getattr(field, 'read_only', None):
                     del serializer.fields[name]
             serializer._data = self.update_raw_data(serializer.data)
@@ -747,7 +744,7 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
     def update_raw_data(self, data):
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method == 'POST' and response_status in xrange(400, 500):
+        if request_method == 'POST' and response_status in range(400, 500):
             return super(SubListAttachDetachAPIView, self).update_raw_data(data)
         return {'id': None}
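Both `xrange` hunks rely on the fact that Python 3's `range` supports constant-time membership tests, so checking a status code against `range(400, 500)` stays cheap — no list is ever materialized:

```python
response_status = 422
assert response_status in range(400, 500)  # O(1) membership test
assert 200 not in range(400, 500)
```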
@@ -853,15 +850,18 @@ class CopyAPIView(GenericAPIView):
             return field_val
         if isinstance(field_val, dict):
             for sub_field in field_val:
-                if isinstance(sub_field, six.string_types) \
-                        and isinstance(field_val[sub_field], six.string_types):
+                if isinstance(sub_field, str) \
+                        and isinstance(field_val[sub_field], str):
                     try:
                         field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
-                    except InvalidToken:
+                    except AttributeError:
                         # Catching the corner case with v1 credential fields
                         field_val[sub_field] = decrypt_field(obj, sub_field)
-        elif isinstance(field_val, six.string_types):
-            field_val = decrypt_field(obj, field_name)
+        elif isinstance(field_val, str):
+            try:
+                field_val = decrypt_field(obj, field_name)
+            except AttributeError:
+                return field_val
         return field_val

     def _build_create_dict(self, obj):

@@ -915,7 +915,7 @@ class CopyAPIView(GenericAPIView):
                     obj, field.name, field_val
                 )
         new_obj = model.objects.create(**create_kwargs)
-        logger.debug(six.text_type('Deep copy: Created new object {}({})').format(
+        logger.debug('Deep copy: Created new object {}({})'.format(
             new_obj, model
         ))
         # Need to save separatedly because Djang-crum get_current_user would
@@ -157,7 +157,7 @@ class Metadata(metadata.SimpleMetadata):
         finally:
             view.request = request

-        for field, meta in actions[method].items():
+        for field, meta in list(actions[method].items()):
             if not isinstance(meta, dict):
                 continue
@@ -234,17 +234,17 @@ class RoleMetadata(Metadata):

 # TODO: Tower 3.3 remove class and all uses in views.py when API v1 is removed
 class JobTypeMetadata(Metadata):
-    def get_field_info(self, field):
-        res = super(JobTypeMetadata, self).get_field_info(field)
-
-        if field.field_name == 'job_type':
-            index = 0
-            for choice in res['choices']:
-                if choice[0] == 'scan':
-                    res['choices'].pop(index)
-                    break
-                index += 1
-        return res
+    def get_field_info(self, field):
+        res = super(JobTypeMetadata, self).get_field_info(field)
+
+        if field.field_name == 'job_type':
+            index = 0
+            for choice in res['choices']:
+                if choice[0] == 'scan':
+                    res['choices'].pop(index)
+                    break
+                index += 1
+        return res


 class SublistAttachDetatchMetadata(Metadata):
@@ -4,7 +4,7 @@ import json

 # Django
 from django.conf import settings
-from django.utils import six
+from django.utils.encoding import smart_str
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework

@@ -25,7 +25,7 @@ class JSONParser(parsers.JSONParser):
         encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

         try:
-            data = stream.read().decode(encoding)
+            data = smart_str(stream.read(), encoding=encoding)
             if not data:
                 return {}
             obj = json.loads(data, object_pairs_hook=OrderedDict)

@@ -33,4 +33,4 @@ class JSONParser(parsers.JSONParser):
                 raise ParseError(_('JSON parse error - not a JSON object'))
             return obj
         except ValueError as exc:
-            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % six.text_type(exc)))
+            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % str(exc)))
@@ -1,12 +1,12 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

+from django.utils.safestring import SafeText
+
 # Django REST Framework
 from rest_framework import renderers
 from rest_framework.request import override_method

-import six
-

 class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
     '''

@@ -20,6 +20,19 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
             return renderers.JSONRenderer()
         return renderer

+    def get_content(self, renderer, data, accepted_media_type, renderer_context):
+        if isinstance(data, SafeText):
+            # Older versions of Django (pre-2.0) have a py3 bug which causes
+            # bytestrings marked as "safe" to not actually get _treated_ as
+            # safe; this causes certain embedded strings (like the stdout HTML
+            # view) to be improperly escaped
+            # see: https://github.com/ansible/awx/issues/3108
+            # https://code.djangoproject.com/ticket/28121
+            return data
+        return super(BrowsableAPIRenderer, self).get_content(renderer, data,
+                                                             accepted_media_type,
+                                                             renderer_context)
+
     def get_context(self, data, accepted_media_type, renderer_context):
         # Store the associated response status to know how to populate the raw
         # data form.

@@ -71,8 +84,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
     format = 'txt'

     def render(self, data, media_type=None, renderer_context=None):
-        if not isinstance(data, six.string_types):
-            data = six.text_type(data)
+        if not isinstance(data, str):
+            data = str(data)
         return data.encode(self.charset)
@@ -5,10 +5,8 @@
 import copy
 import json
 import logging
-import operator
 import re
-import six
-import urllib
+import urllib.parse
 from collections import OrderedDict
 from datetime import timedelta

@@ -40,17 +38,16 @@ from rest_framework.utils.serializer_helpers import ReturnList
 from polymorphic.models import PolymorphicModel

 # AWX
+from awx.main.access import get_user_capabilities
 from awx.main.constants import (
     SCHEDULEABLE_PROVIDERS,
     ANSI_SGR_PATTERN,
     ACTIVE_STATES,
     CENSOR_VALUE,
-    CHOICES_PRIVILEGE_ESCALATION_METHODS,
 )
 from awx.main.models import *  # noqa
 from awx.main.models.base import NEW_JOB_TYPE_CHOICES
-from awx.main.access import get_user_capabilities
-from awx.main.fields import ImplicitRoleField
+from awx.main.fields import ImplicitRoleField, JSONBField
 from awx.main.utils import (
     get_type_for_model, get_model_for_type, timestamp_apiformat,
     camelcase_to_underscore, getattrd, parse_yaml_or_json,
@@ -203,11 +200,11 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):

     @staticmethod
     def _is_list_of_strings(x):
-        return isinstance(x, (list, tuple)) and all([isinstance(y, basestring) for y in x])
+        return isinstance(x, (list, tuple)) and all([isinstance(y, str) for y in x])

     @staticmethod
     def _is_extra_kwargs(x):
-        return isinstance(x, dict) and all([isinstance(k, basestring) and isinstance(v, dict) for k,v in x.items()])
+        return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k,v in x.items()])

     @classmethod
     def _update_meta(cls, base, meta, other=None):
@@ -259,9 +256,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
         return super(BaseSerializerMetaclass, cls).__new__(cls, name, bases, attrs)


-class BaseSerializer(serializers.ModelSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
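Python 3 silently ignores a `__metaclass__` class attribute, which is why the serializer base classes are rewritten to pass the metaclass as a class keyword argument. The two spellings side by side, as a standalone sketch:

```python
class Meta(type):
    pass

# Python 2 spelling (a silent no-op on Python 3):
#     class Serializer(object):
#         __metaclass__ = Meta

# Python 3 spelling, as used in the hunk above:
class Serializer(metaclass=Meta):
    pass

assert type(Serializer) is Meta
```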
@@ -284,7 +279,7 @@ class BaseSerializer(serializers.ModelSerializer):
         # The following lines fix the problem of being able to pass JSON dict into PrimaryKeyRelatedField.
         data = kwargs.get('data', False)
         if data:
-            for field_name, field_instance in six.iteritems(self.fields):
+            for field_name, field_instance in self.fields.items():
                 if isinstance(field_instance, ManyRelatedField) and not field_instance.read_only:
                     if isinstance(data.get(field_name, False), dict):
                         raise serializers.ValidationError(_('Cannot use dictionary for %s' % field_name))
@@ -294,7 +289,7 @@ class BaseSerializer(serializers.ModelSerializer):
         """
         The request version component of the URL as an integer i.e., 1 or 2
         """
-        return get_request_version(self.context.get('request'))
+        return get_request_version(self.context.get('request')) or 1

     def get_type(self, obj):
         return get_type_for_model(self.Meta.model)
@@ -612,7 +607,7 @@ class BaseSerializer(serializers.ModelSerializer):
                     v2.extend(e)
                 else:
                     v2.append(e)
-                d[k] = map(force_text, v2)
+                d[k] = list(map(force_text, v2))
             raise ValidationError(d)
         return attrs
@@ -632,9 +627,7 @@ class EmptySerializer(serializers.Serializer):
     pass


-class BaseFactSerializer(BaseSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseFactSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     def get_fields(self):
         ret = super(BaseFactSerializer, self).get_fields()
@@ -1050,7 +1043,7 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
         return ret

     def _is_valid_scope(self, value):
-        if not value or (not isinstance(value, six.string_types)):
+        if not value or (not isinstance(value, str)):
             return False
         words = value.split()
         for word in words:
@@ -1549,6 +1542,18 @@ class InventorySerializer(BaseSerializerWithVariables):
     def validate_host_filter(self, host_filter):
         if host_filter:
             try:
+                for match in JSONBField.get_lookups().keys():
+                    if match == 'exact':
+                        # __exact is allowed
+                        continue
+                    match = '__{}'.format(match)
+                    if re.match(
+                        'ansible_facts[^=]+{}='.format(match),
+                        host_filter
+                    ):
+                        raise models.base.ValidationError({
+                            'host_filter': 'ansible_facts does not support searching with {}'.format(match)
+                        })
                 SmartFilter().query_from_string(host_filter)
             except RuntimeError as e:
                 raise models.base.ValidationError(e)
@@ -2139,10 +2144,10 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
             return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)

         if get_field_from_model_or_attrs('source') != 'scm':
-            redundant_scm_fields = filter(
+            redundant_scm_fields = list(filter(
                 lambda x: attrs.get(x, None),
                 ['source_project', 'source_path', 'update_on_project_update']
-            )
+            ))
             if redundant_scm_fields:
                 raise serializers.ValidationError(
                     {"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))}
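On Python 3, `filter()` and `map()` return one-shot iterators rather than lists, so any result that is reused, truth-tested as a collection, or serialized has to be materialized with `list()`, as this hunk (and the `CredentialTypeSerializer` one below) does:

```python
fields = filter(None, ['source_project', '', 'source_path'])
materialized = list(fields)
assert materialized == ['source_project', 'source_path']
assert list(fields) == []  # the iterator is already exhausted after one pass
```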
@@ -2179,10 +2184,12 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):

 class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

+    custom_virtualenv = serializers.ReadOnlyField()
+
     class Meta:
         model = InventoryUpdate
         fields = ('*', 'inventory', 'inventory_source', 'license_error', 'source_project_update',
-                  '-controller_node',)
+                  'custom_virtualenv', '-controller_node',)

     def get_related(self, obj):
         res = super(InventoryUpdateSerializer, self).get_related(obj)
@@ -2211,6 +2218,44 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
         return res


+class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):
+
+    source_project = serializers.SerializerMethodField(
+        help_text=_('The project used for this job.'),
+        method_name='get_source_project_id'
+    )
+
+    class Meta:
+        model = InventoryUpdate
+        fields = ('*', 'source_project',)
+
+    def get_source_project(self, obj):
+        return getattrd(obj, 'source_project_update.unified_job_template', None)
+
+    def get_source_project_id(self, obj):
+        return getattrd(obj, 'source_project_update.unified_job_template.id', None)
+
+    def get_related(self, obj):
+        res = super(InventoryUpdateDetailSerializer, self).get_related(obj)
+        source_project_id = self.get_source_project_id(obj)
+
+        if source_project_id:
+            res['source_project'] = self.reverse('api:project_detail', kwargs={'pk': source_project_id})
+        return res
+
+    def get_summary_fields(self, obj):
+        summary_fields = super(InventoryUpdateDetailSerializer, self).get_summary_fields(obj)
+        summary_obj = self.get_source_project(obj)
+
+        if summary_obj:
+            summary_fields['source_project'] = {}
+            for field in SUMMARIZABLE_FK_FIELDS['project']:
+                value = getattr(summary_obj, field, None)
+                if value is not None:
+                    summary_fields['source_project'][field] = value
+        return summary_fields
+
+
 class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSerializer):

     class Meta:
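A hedged sketch of the DRF pattern the new serializer leans on — `SerializerMethodField` with an explicit `method_name`, so the rendered field can return the related object's id while a differently named helper stays available (model and attribute names below are illustrative, not AWX's real ones):

```python
from rest_framework import serializers

class ExampleDetailSerializer(serializers.Serializer):
    source_project = serializers.SerializerMethodField(
        method_name='get_source_project_id'
    )

    def get_source_project_id(self, obj):
        # mirrors getattrd(obj, 'source_project_update...', None) above
        return getattr(obj, 'source_project_id', None)
```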
@@ -2463,25 +2508,21 @@ class CredentialTypeSerializer(BaseSerializer):
                 field['label'] = _(field['label'])
                 if 'help_text' in field:
                     field['help_text'] = _(field['help_text'])
-                if field['type'] == 'become_method':
-                    field.pop('type')
-                    field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
         return value

     def filter_field_metadata(self, fields, method):
         # API-created/modified CredentialType kinds are limited to
         # `cloud` and `net`
         if method in ('PUT', 'POST'):
-            fields['kind']['choices'] = filter(
+            fields['kind']['choices'] = list(filter(
                 lambda choice: choice[0] in ('cloud', 'net'),
                 fields['kind']['choices']
-            )
+            ))
         return fields


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1CredentialFields(BaseSerializer):
+class V1CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2499,8 +2540,7 @@ class V1CredentialFields(BaseSerializer):
         return super(V1CredentialFields, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class V2CredentialFields(BaseSerializer):
+class V2CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
@@ -2626,8 +2666,8 @@ class CredentialSerializer(BaseSerializer):
             raise serializers.ValidationError({"kind": _('"%s" is not a valid choice' % kind)})
         data['credential_type'] = credential_type.pk
         value = OrderedDict(
-            {'credential_type': credential_type}.items() +
-            super(CredentialSerializer, self).to_internal_value(data).items()
+            list({'credential_type': credential_type}.items()) +
+            list(super(CredentialSerializer, self).to_internal_value(data).items())
         )

         # Make a set of the keys in the POST/PUT payload
@@ -2788,8 +2828,7 @@ class LabelsListMixin(object):


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1JobOptionsSerializer(BaseSerializer):
+class V1JobOptionsSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2803,8 +2842,7 @@ class V1JobOptionsSerializer(BaseSerializer):
         return super(V1JobOptionsSerializer, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class LegacyCredentialFields(BaseSerializer):
+class LegacyCredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
@@ -3297,10 +3335,11 @@ class JobDetailSerializer(JobSerializer):
     playbook_counts = serializers.SerializerMethodField(
         help_text=_('A count of all plays and tasks for the job run.'),
     )
+    custom_virtualenv = serializers.ReadOnlyField()

     class Meta:
         model = Job
-        fields = ('*', 'host_status_counts', 'playbook_counts',)
+        fields = ('*', 'host_status_counts', 'playbook_counts', 'custom_virtualenv')

     def get_playbook_counts(self, obj):
         task_count = obj.job_events.filter(event='playbook_on_task_start').count()
@@ -3487,12 +3526,16 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
             ret['name'] = obj.module_name
         return ret

+    def validate(self, attrs):
+        ret = super(AdHocCommandSerializer, self).validate(attrs)
+        return ret
+
     def validate_extra_vars(self, value):
         redacted_extra_vars, removed_vars = extract_ansible_vars(value)
         if removed_vars:
             raise serializers.ValidationError(_(
                 "{} are prohibited from use in ad hoc commands."
-            ).format(", ".join(removed_vars)))
+            ).format(", ".join(sorted(removed_vars, reverse=True))))
         return vars_validate_or_raise(value)
@@ -3720,7 +3763,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             for field in self.instance._meta.fields:
                 setattr(mock_obj, field.name, getattr(self.instance, field.name))
         field_names = set(field.name for field in self.Meta.model._meta.fields)
-        for field_name, value in attrs.items():
+        for field_name, value in list(attrs.items()):
             setattr(mock_obj, field_name, value)
             if field_name not in field_names:
                 attrs.pop(field_name)
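Wrapping `attrs.items()` in `list()` matters here because the loop body calls `attrs.pop(...)`: Python 3 dict views are live, and removing keys while iterating over a live view raises `RuntimeError`. Snapshotting first makes the mutation safe:

```python
attrs = {'keep': 1, 'drop': 2}
field_names = {'keep'}

for field_name, value in list(attrs.items()):  # iterate over a snapshot
    if field_name not in field_names:
        attrs.pop(field_name)                  # safe: the view is not live

assert attrs == {'keep': 1}
```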
@@ -4334,7 +4377,7 @@ class JobLaunchSerializer(BaseSerializer):
                     passwords_needed=cred.passwords_needed
                 )
                 if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
-                    cred_dict['vault_id'] = cred.inputs.get('vault_id') or None
+                    cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
                 defaults_dict.setdefault(field_name, []).append(cred_dict)
             else:
                 defaults_dict[field_name] = getattr(obj, field_name)
@@ -4384,7 +4427,7 @@ class JobLaunchSerializer(BaseSerializer):
                 errors.setdefault('credentials', []).append(_(
                     'Removing {} credential at launch time without replacement is not supported. '
                     'Provided list lacked credential(s): {}.'
-                ).format(cred.unique_hash(display=True), ', '.join([six.text_type(c) for c in removed_creds])))
+                ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))

         # verify that credentials (either provided or existing) don't
         # require launch-time passwords that have not been provided
@@ -4490,11 +4533,11 @@ class NotificationTemplateSerializer(BaseSerializer):
         model = NotificationTemplate
         fields = ('*', 'organization', 'notification_type', 'notification_configuration')

-    type_map = {"string": (str, unicode),
+    type_map = {"string": (str,),
                 "int": (int,),
                 "bool": (bool,),
                 "list": (list,),
-                "password": (str, unicode),
+                "password": (str,),
                 "object": (dict, OrderedDict)}

     def to_representation(self, obj):
@@ -4722,8 +4765,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
             raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
         elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
             raise serializers.ValidationError(_(
-                six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
-                              'Schedule its source project `{}` instead.').format(value.source_project.name)))
+                'Inventory sources with `update_on_project_update` cannot be scheduled. '
+                'Schedule its source project `{}` instead.'.format(value.source_project.name)))
         return value
@@ -4877,7 +4920,7 @@ class ActivityStreamSerializer(BaseSerializer):
         for key in summary_dict.keys():
             if 'id' not in summary_dict[key]:
                 summary_dict[key] = summary_dict[key] + ('id',)
-        field_list = summary_dict.items()
+        field_list = list(summary_dict.items())
         # Needed related fields that are not in the default summary fields
         field_list += [
             ('workflow_job_template_node', ('id', 'unified_job_template_id')),

@@ -4897,7 +4940,7 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_fields(self):
         ret = super(ActivityStreamSerializer, self).get_fields()
-        for key, field in ret.items():
+        for key, field in list(ret.items()):
             if key == 'changes':
                 field.help_text = _('A summary of the new and changed values when an object is created, updated, or deleted')
             if key == 'object1':
@@ -5039,7 +5082,7 @@ class FactVersionSerializer(BaseFactSerializer):
         }
         res['fact_view'] = '%s?%s' % (
             reverse('api:host_fact_compare_view', kwargs={'pk': obj.host.pk}, request=self.context.get('request')),
-            urllib.urlencode(params)
+            urllib.parse.urlencode(params)
         )
         return res

@@ -5061,6 +5104,6 @@ class FactSerializer(BaseFactSerializer):
         ret = super(FactSerializer, self).to_representation(obj)
         if obj is None:
             return ret
-        if 'facts' in ret and isinstance(ret['facts'], six.string_types):
+        if 'facts' in ret and isinstance(ret['facts'], str):
             ret['facts'] = json.loads(ret['facts'])
         return ret
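Python 3 folded py2's `urllib`/`urlparse` functions into the `urllib.parse` submodule, which is what the `quote`/`urlencode` call sites above (and the `urlparse` alias later in this diff) are tracking:

```python
import urllib.parse

assert urllib.parse.quote('a b') == 'a%20b'
assert urllib.parse.urlencode({'page': 1, 'order': 'name'}) == 'page=1&order=name'
```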
@@ -12,13 +12,12 @@ import requests
 import functools
 from base64 import b64encode
 from collections import OrderedDict, Iterable
-import six


 # Django
 from django.conf import settings
 from django.core.exceptions import FieldError, ObjectDoesNotExist
-from django.db.models import Q
+from django.db.models import Q, Sum
 from django.db import IntegrityError, transaction, connection
 from django.shortcuts import get_object_or_404
 from django.utils.safestring import mark_safe

@@ -70,7 +69,6 @@ from awx.main.models import * # noqa
 from awx.main.utils import * # noqa
 from awx.main.utils import (
     extract_ansible_vars,
-    decrypt_field,
 )
 from awx.main.utils.encryption import encrypt_value
 from awx.main.utils.filters import SmartFilter
@@ -173,7 +171,7 @@ class DashboardView(APIView):
         user_inventory = get_user_queryset(request.user, Inventory)
         inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
         user_inventory_external = user_inventory.filter(has_inventory_sources=True)
-        failed_inventory = sum(i.inventory_sources_with_failures for i in user_inventory)
+        failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum']
         data['inventories'] = {'url': reverse('api:inventory_list', request=request),
                                'total': user_inventory.count(),
                                'total_with_inventory_source': user_inventory_external.count(),
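The dashboard rewrite pushes the summation into a single SQL query instead of fetching every row and summing in Python. A sketch of the ORM behavior, assuming a configured Django project and the queryset used above — `aggregate()` returns a dict keyed `<field>__<function>`, and the value is `None` on an empty queryset:

```python
from django.db.models import Sum

totals = user_inventory.aggregate(Sum('inventory_sources_with_failures'))
failed = totals['inventory_sources_with_failures__sum'] or 0  # None -> 0
```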
@@ -517,7 +515,7 @@ class AuthView(APIView):
         from rest_framework.reverse import reverse
         data = OrderedDict()
         err_backend, err_message = request.session.get('social_auth_error', (None, None))
-        auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items()
+        auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
         # Return auth backends in consistent order: Google, GitHub, SAML.
         auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
         for name, backend in auth_backends:
@@ -525,7 +523,7 @@ class AuthView(APIView):
                     not feature_enabled('ldap')) or \
                     (not feature_enabled('enterprise_auth') and
                      name in ['saml', 'radius']):
-                continue
+                    continue

             login_url = reverse('social:begin', args=(name,))
             complete_url = request.build_absolute_uri(reverse('social:complete', args=(name,)))
@@ -1436,7 +1434,7 @@ class HostList(HostRelatedSearchMixin, ListCreateAPIView):
         try:
             return super(HostList, self).list(*args, **kwargs)
         except Exception as e:
-            return Response(dict(error=_(six.text_type(e))), status=status.HTTP_400_BAD_REQUEST)
+            return Response(dict(error=_(str(e))), status=status.HTTP_400_BAD_REQUEST)


 class HostDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
@@ -1592,7 +1590,7 @@ class HostInsights(GenericAPIView):
     serializer_class = EmptySerializer

     def _extract_insights_creds(self, credential):
-        return (credential.inputs['username'], decrypt_field(credential, 'password'))
+        return (credential.get_input('username', default=''), credential.get_input('password', default=''))

     def _get_insights(self, url, username, password):
         session = requests.Session()
@@ -1879,7 +1877,7 @@ class InventoryScriptView(RetrieveAPIView):
         show_all = bool(request.query_params.get('all', ''))
         subset = request.query_params.get('subset', '')
         if subset:
-            if not isinstance(subset, six.string_types):
+            if not isinstance(subset, str):
                 raise ParseError(_('Inventory subset argument must be a string.'))
             if subset.startswith('slice'):
                 slice_number, slice_count = Inventory.parse_slice_params(subset)
@@ -1973,7 +1971,7 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView):
             details['status'] = None
             if inventory_source.can_update:
                 update = inventory_source.update()
-                details.update(InventoryUpdateSerializer(update, context=self.get_serializer_context()).to_representation(update))
+                details.update(InventoryUpdateDetailSerializer(update, context=self.get_serializer_context()).to_representation(update))
                 details['status'] = 'started'
                 details['inventory_update'] = update.id
                 successes += 1
@@ -2136,7 +2134,7 @@ class InventorySourceUpdateView(RetrieveAPIView):
             headers = {'Location': update.get_absolute_url(request=request)}
             data = OrderedDict()
             data['inventory_update'] = update.id
-            data.update(InventoryUpdateSerializer(update, context=self.get_serializer_context()).to_representation(update))
+            data.update(InventoryUpdateDetailSerializer(update, context=self.get_serializer_context()).to_representation(update))
             return Response(data, status=status.HTTP_202_ACCEPTED, headers=headers)
         else:
             return self.http_method_not_allowed(request, *args, **kwargs)
@@ -2151,7 +2149,7 @@ class InventoryUpdateList(ListAPIView):

 class InventoryUpdateDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):

     model = InventoryUpdate
-    serializer_class = InventoryUpdateSerializer
+    serializer_class = InventoryUpdateDetailSerializer


 class InventoryUpdateCredentialsList(SubListAPIView):
@@ -2308,7 +2306,7 @@ class JobTemplateLaunch(RetrieveAPIView):
                 raise ParseError({key: [msg], 'credentials': [msg]})

             # add the deprecated credential specified in the request
-            if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, basestring):
+            if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, str):
                 prompted_value = [prompted_value]

             # If user gave extra_credentials, special case to use exactly
@@ -2417,11 +2415,11 @@ class JobTemplateSurveySpec(GenericAPIView):
     serializer_class = EmptySerializer

     ALLOWED_TYPES = {
-        'text': six.string_types,
-        'textarea': six.string_types,
-        'password': six.string_types,
-        'multiplechoice': six.string_types,
-        'multiselect': six.string_types,
+        'text': str,
+        'textarea': str,
+        'password': str,
+        'multiplechoice': str,
+        'multiselect': str,
         'integer': int,
         'float': float
     }
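`six.string_types` is `(str, unicode)` on Python 2 but just `(str,)` on Python 3, and `basestring` is gone entirely, so after the migration a bare `str` covers every text value while `bytes` are deliberately excluded:

```python
for value in ('survey answer', b'survey answer'):
    print(repr(value), isinstance(value, str))
# 'survey answer' -> True; b'survey answer' -> False (bytes are not text)
```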
@@ -2456,8 +2454,8 @@ class JobTemplateSurveySpec(GenericAPIView):
     def _validate_spec_data(new_spec, old_spec):
         schema_errors = {}
         for field, expect_type, type_label in [
-                ('name', six.string_types, 'string'),
-                ('description', six.string_types, 'string'),
+                ('name', str, 'string'),
+                ('description', str, 'string'),
                 ('spec', list, 'list of items')]:
             if field not in new_spec:
                 schema_errors['error'] = _("Field '{}' is missing from survey spec.").format(field)
@@ -2475,7 +2473,7 @@ class JobTemplateSurveySpec(GenericAPIView):
         old_spec_dict = JobTemplate.pivot_spec(old_spec)
         for idx, survey_item in enumerate(new_spec["spec"]):
             context = dict(
-                idx=six.text_type(idx),
+                idx=str(idx),
                 survey_item=survey_item
             )
             # General element validation
@@ -2487,7 +2485,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                     field_name=field_name, **context
                 )), status=status.HTTP_400_BAD_REQUEST)
             val = survey_item[field_name]
-            allow_types = six.string_types
+            allow_types = str
             type_label = 'string'
             if field_name == 'required':
                 allow_types = bool
@@ -2535,7 +2533,7 @@ class JobTemplateSurveySpec(GenericAPIView):
             )))

         # Process encryption substitution
-        if ("default" in survey_item and isinstance(survey_item['default'], six.string_types) and
+        if ("default" in survey_item and isinstance(survey_item['default'], str) and
                 survey_item['default'].startswith('$encrypted$')):
             # Submission expects the existence of encrypted DB value to replace given default
             if qtype != "password":
@@ -2547,7 +2545,7 @@ class JobTemplateSurveySpec(GenericAPIView):
             encryptedish_default_exists = False
             if 'default' in old_element:
                 old_default = old_element['default']
-                if isinstance(old_default, six.string_types):
+                if isinstance(old_default, str):
                     if old_default.startswith('$encrypted$'):
                         encryptedish_default_exists = True
                     elif old_default == "":  # unencrypted blank string is allowed as DB value as special case
@@ -3076,8 +3074,8 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView):
             elif field_name in ['credentials']:
                 for cred in item.all():
                     if not user.can_access(cred.__class__, 'use', cred):
-                        logger.debug(six.text_type(
-                            'Deep copy: removing {} from relationship due to permissions').format(cred))
+                        logger.debug(
+                            'Deep copy: removing {} from relationship due to permissions'.format(cred))
                         item.remove(cred.pk)
             obj.save()
@@ -3154,9 +3152,10 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):
     def post(self, request, *args, **kwargs):
         obj = self.get_object()
         if obj.is_sliced_job:
-            if not obj.job_template_id:
+            jt = obj.job_template
+            if not jt:
                 raise ParseError(_('Cannot relaunch slice workflow job orphaned from job template.'))
-            elif obj.job_template.job_slice_count != obj.workflow_nodes.count():
+            elif not jt.inventory or min(jt.inventory.hosts.count(), jt.job_slice_count) != obj.workflow_nodes.count():
                 raise ParseError(_('Cannot relaunch sliced workflow job after slice count has changed.'))
         new_workflow_job = obj.create_relaunch_workflow_job()
         new_workflow_job.signal_start()
@@ -3619,11 +3618,6 @@ class JobRelaunch(RetrieveAPIView):
                     'Cannot relaunch because previous job had 0 {status_value} hosts.'
                 ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
             copy_kwargs['limit'] = ','.join(retry_host_list)
-            limit_length = len(copy_kwargs['limit'])
-            if limit_length > 1024:
-                return Response({'limit': _(
-                    'Cannot relaunch because the limit length {limit_length} exceeds the max of {limit_max}.'
-                ).format(limit_length=limit_length, limit_max=1024)}, status=status.HTTP_400_BAD_REQUEST)

         new_job = obj.copy_unified_job(**copy_kwargs)
         result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
@@ -4458,7 +4452,7 @@ class RoleChildrenList(SubListAPIView):
 # in URL patterns and reverse URL lookups, converting CamelCase names to
 # lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
 this_module = sys.modules[__name__]
-for attr, value in locals().items():
+for attr, value in list(locals().items()):
     if isinstance(value, type) and issubclass(value, APIView):
         name = camelcase_to_underscore(attr)
         view = value.as_view()
@@ -2,6 +2,7 @@
 # All Rights Reserved.

 import logging
+import operator
 import json
 from collections import OrderedDict
@@ -26,6 +27,7 @@ from awx.main.utils import (
 )
 from awx.api.versioning import reverse, get_request_version, drf_reverse
 from awx.conf.license import get_license, feature_enabled
+from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import (
     Project,
     Organization,
@@ -161,7 +163,7 @@ class ApiV1PingView(APIView):
         for instance in Instance.objects.all():
             response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
                                               capacity=instance.capacity, version=instance.version))
-        response['instances'].sort()
+        sorted(response['instances'], key=operator.itemgetter('node'))
         response['instance_groups'] = []
         for instance_group in InstanceGroup.objects.all():
             response['instance_groups'].append(dict(name=instance_group.name,
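For reference, `sorted()` builds a new list and leaves its argument untouched, unlike the in-place `list.sort()` it replaces here, so the return value is what carries the ordering:

```python
import operator

instances = [{'node': 'b'}, {'node': 'a'}]
by_node = sorted(instances, key=operator.itemgetter('node'))
assert [i['node'] for i in by_node] == ['a', 'b']
assert instances[0]['node'] == 'b'  # original list order is unchanged
```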
@@ -202,7 +204,8 @@ class ApiV1ConfigView(APIView):
|
||||
version=get_awx_version(),
|
||||
ansible_version=get_ansible_version(),
|
||||
eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
|
||||
analytics_status=pendo_state
|
||||
analytics_status=pendo_state,
|
||||
become_methods=PRIVILEGE_ESCALATION_METHODS,
|
||||
)
|
||||
|
||||
# If LDAP is enabled, user_ldap_fields will return a list of field
|
||||
|
||||
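The ping-view hunk above replaces a bare `.sort()` on a list of dicts, which Python 3 rejects because dicts are unorderable, with a keyed sort. Note that `sorted()` returns a new list rather than ordering in place; a small sketch of the keyed, in-place variant, assuming the same `node` field:

```python
import operator

instances = [{'node': 'b'}, {'node': 'a'}]
# sorted() would build a new list and leave `instances` untouched;
# passing the key to .sort() orders the existing list in place:
instances.sort(key=operator.itemgetter('node'))
assert [i['node'] for i in instances] == ['a', 'b']
```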
@@ -1,6 +1,6 @@
# Python
import logging
import urlparse
import urllib.parse as urlparse
from collections import OrderedDict

# Django
@@ -10,8 +10,6 @@ from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.fields import * # noqa

import six

logger = logging.getLogger('awx.conf.fields')

# Use DRF fields to convert/validate settings:
@@ -71,7 +69,7 @@ class StringListBooleanField(ListField):
return False
elif value in NullBooleanField.NULL_VALUES:
return None
elif isinstance(value, basestring):
elif isinstance(value, str):
return self.child.to_representation(value)
except TypeError:
pass
@@ -88,7 +86,7 @@ class StringListBooleanField(ListField):
return False
elif data in NullBooleanField.NULL_VALUES:
return None
elif isinstance(data, basestring):
elif isinstance(data, str):
return self.child.run_validation(data)
except TypeError:
pass
@@ -139,7 +137,7 @@ class KeyValueField(DictField):
def to_internal_value(self, data):
ret = super(KeyValueField, self).to_internal_value(data)
for value in data.values():
if not isinstance(value, six.string_types + six.integer_types + (float,)):
if not isinstance(value, (str, int, float)):
if isinstance(value, OrderedDict):
value = dict(value)
self.fail('invalid_child', input=value)

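Several hunks in this file follow the same Python 3 porting pattern: `basestring` and the `six` type tuples collapse to the built-in types. A minimal standalone sketch of the equivalent check:

```python
# Python 2's basestring covered str and unicode; on Python 3 a plain
# isinstance(value, str) replaces it, and the six numeric/string type
# tuples collapse to the built-in types:
def is_scalar(value):
    return isinstance(value, (str, int, float))

assert is_scalar('x') and is_scalar(3) and is_scalar(1.5)
assert not is_scalar({'a': 1})
```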
@@ -1,480 +0,0 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

# Python
import base64
import collections
import difflib
import json
import os
import shutil

# Django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

# Tower
from awx import MODE
from awx.conf import settings_registry
from awx.conf.fields import empty, SkipField
from awx.conf.models import Setting
from awx.conf.utils import comment_assignments


class Command(BaseCommand):

def add_arguments(self, parser):
parser.add_argument(
'category',
nargs='*',
type=str,
)
parser.add_argument(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help=_('Only show which settings would be commented/migrated.'),
)
parser.add_argument(
'--skip-errors',
action='store_true',
dest='skip_errors',
default=False,
help=_('Skip over settings that would raise an error when commenting/migrating.'),
)
parser.add_argument(
'--no-comment',
action='store_true',
dest='no_comment',
default=False,
help=_('Skip commenting out settings in files.'),
)
parser.add_argument(
'--comment-only',
action='store_true',
dest='comment_only',
default=False,
help=_('Skip migrating and only comment out settings in files.'),
)
parser.add_argument(
'--backup-suffix',
dest='backup_suffix',
default=now().strftime('.%Y%m%d%H%M%S'),
help=_('Backup existing settings files with this suffix.'),
)

@transaction.atomic
def handle(self, *args, **options):
self.verbosity = int(options.get('verbosity', 1))
self.dry_run = bool(options.get('dry_run', False))
self.skip_errors = bool(options.get('skip_errors', False))
self.no_comment = bool(options.get('no_comment', False))
self.comment_only = bool(options.get('comment_only', False))
self.backup_suffix = options.get('backup_suffix', '')
self.categories = options.get('category', None) or ['all']
self.style.HEADING = self.style.MIGRATE_HEADING
self.style.LABEL = self.style.MIGRATE_LABEL
self.style.OK = self.style.SQL_FIELD
self.style.SKIP = self.style.WARNING
self.style.VALUE = self.style.SQL_KEYWORD

# Determine if any categories provided are invalid.
category_slugs = []
invalid_categories = []
for category in self.categories:
category_slug = slugify(category)
if category_slug in settings_registry.get_registered_categories():
if category_slug not in category_slugs:
category_slugs.append(category_slug)
else:
if category not in invalid_categories:
invalid_categories.append(category)
if len(invalid_categories) == 1:
raise CommandError('Invalid setting category: {}'.format(invalid_categories[0]))
elif len(invalid_categories) > 1:
raise CommandError('Invalid setting categories: {}'.format(', '.join(invalid_categories)))

# Build a list of all settings to be migrated.
registered_settings = []
for category_slug in category_slugs:
for registered_setting in settings_registry.get_registered_settings(category_slug=category_slug, read_only=False):
if registered_setting not in registered_settings:
registered_settings.append(registered_setting)

self._migrate_settings(registered_settings)

def _get_settings_file_patterns(self):
if MODE == 'development':
return [
'/etc/tower/settings.py',
'/etc/tower/conf.d/*.py',
os.path.join(os.path.dirname(__file__), '..', '..', '..', 'settings', 'local_*.py')
]
else:
return [
os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py'),
os.path.join(os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/'), '*.py'),
]

def _get_license_file(self):
return os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license')

def _comment_license_file(self, dry_run=True):
license_file = self._get_license_file()
diff_lines = []
if os.path.exists(license_file):
try:
raw_license_data = open(license_file).read()
json.loads(raw_license_data)
except Exception as e:
raise CommandError('Error reading license from {0}: {1!r}'.format(license_file, e))
if self.backup_suffix:
backup_license_file = '{}{}'.format(license_file, self.backup_suffix)
else:
backup_license_file = '{}.old'.format(license_file)
diff_lines = list(difflib.unified_diff(
raw_license_data.splitlines(),
[],
fromfile=backup_license_file,
tofile=license_file,
lineterm='',
))
if not dry_run:
if self.backup_suffix:
shutil.copy2(license_file, backup_license_file)
os.remove(license_file)
return diff_lines

def _get_local_settings_file(self):
if MODE == 'development':
static_root = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'ui', 'static')
else:
static_root = settings.STATIC_ROOT
return os.path.join(static_root, 'local_settings.json')

def _comment_local_settings_file(self, dry_run=True):
local_settings_file = self._get_local_settings_file()
diff_lines = []
if os.path.exists(local_settings_file):
try:
raw_local_settings_data = open(local_settings_file).read()
json.loads(raw_local_settings_data)
except Exception as e:
if not self.skip_errors:
raise CommandError('Error reading local settings from {0}: {1!r}'.format(local_settings_file, e))
return diff_lines
if self.backup_suffix:
backup_local_settings_file = '{}{}'.format(local_settings_file, self.backup_suffix)
else:
backup_local_settings_file = '{}.old'.format(local_settings_file)
diff_lines = list(difflib.unified_diff(
raw_local_settings_data.splitlines(),
[],
fromfile=backup_local_settings_file,
tofile=local_settings_file,
lineterm='',
))
if not dry_run:
if self.backup_suffix:
shutil.copy2(local_settings_file, backup_local_settings_file)
os.remove(local_settings_file)
return diff_lines

def _get_custom_logo_file(self):
if MODE == 'development':
static_root = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'ui', 'static')
else:
static_root = settings.STATIC_ROOT
return os.path.join(static_root, 'assets', 'custom_console_logo.png')

def _comment_custom_logo_file(self, dry_run=True):
custom_logo_file = self._get_custom_logo_file()
diff_lines = []
if os.path.exists(custom_logo_file):
try:
raw_custom_logo_data = open(custom_logo_file).read()
except Exception as e:
if not self.skip_errors:
raise CommandError('Error reading custom logo from {0}: {1!r}'.format(custom_logo_file, e))
return diff_lines
if self.backup_suffix:
backup_custom_logo_file = '{}{}'.format(custom_logo_file, self.backup_suffix)
else:
backup_custom_logo_file = '{}.old'.format(custom_logo_file)
diff_lines = list(difflib.unified_diff(
['<PNG Image ({} bytes)>'.format(len(raw_custom_logo_data))],
[],
fromfile=backup_custom_logo_file,
tofile=custom_logo_file,
lineterm='',
))
if not dry_run:
if self.backup_suffix:
shutil.copy2(custom_logo_file, backup_custom_logo_file)
os.remove(custom_logo_file)
return diff_lines

def _check_if_needs_comment(self, patterns, setting):
files_to_comment = []
# If any diffs are returned, this setting needs to be commented.
diffs = comment_assignments(patterns, setting, dry_run=True)
if setting == 'LICENSE':
diffs.extend(self._comment_license_file(dry_run=True))
elif setting == 'CUSTOM_LOGIN_INFO':
diffs.extend(self._comment_local_settings_file(dry_run=True))
elif setting == 'CUSTOM_LOGO':
diffs.extend(self._comment_custom_logo_file(dry_run=True))
for diff in diffs:
for line in diff.splitlines():
if line.startswith('+++ '):
files_to_comment.append(line[4:])
return files_to_comment

def _check_if_needs_migration(self, setting):
# Check whether the current value differs from the default.
default_value = settings.DEFAULTS_SNAPSHOT.get(setting, empty)
if default_value is empty and setting != 'LICENSE':
field = settings_registry.get_setting_field(setting, read_only=True)
try:
default_value = field.get_default()
except SkipField:
pass
current_value = getattr(settings, setting, empty)
if setting == 'CUSTOM_LOGIN_INFO' and current_value in {empty, ''}:
local_settings_file = self._get_local_settings_file()
try:
if os.path.exists(local_settings_file):
local_settings = json.load(open(local_settings_file))
current_value = local_settings.get('custom_login_info', '')
except Exception as e:
if not self.skip_errors:
raise CommandError('Error reading custom login info from {0}: {1!r}'.format(local_settings_file, e))
if setting == 'CUSTOM_LOGO' and current_value in {empty, ''}:
custom_logo_file = self._get_custom_logo_file()
try:
if os.path.exists(custom_logo_file):
custom_logo_data = open(custom_logo_file).read()
if custom_logo_data:
current_value = 'data:image/png;base64,{}'.format(base64.b64encode(custom_logo_data))
else:
current_value = ''
except Exception as e:
if not self.skip_errors:
raise CommandError('Error reading custom logo from {0}: {1!r}'.format(custom_logo_file, e))
if current_value != default_value:
if current_value is empty:
current_value = None
return current_value
return empty

def _display_tbd(self, setting, files_to_comment, migrate_value, comment_error=None, migrate_error=None):
if self.verbosity >= 1:
if files_to_comment:
if migrate_value is not empty:
action = 'Migrate + Comment'
else:
action = 'Comment'
if comment_error or migrate_error:
action = self.style.ERROR('{} (skipped)'.format(action))
else:
action = self.style.OK(action)
self.stdout.write(' {}: {}'.format(
self.style.LABEL(setting),
action,
))
if self.verbosity >= 2:
if migrate_error:
self.stdout.write(' - Migrate value: {}'.format(
self.style.ERROR(migrate_error),
))
elif migrate_value is not empty:
self.stdout.write(' - Migrate value: {}'.format(
self.style.VALUE(repr(migrate_value)),
))
if comment_error:
self.stdout.write(' - Comment: {}'.format(
self.style.ERROR(comment_error),
))
elif files_to_comment:
for file_to_comment in files_to_comment:
self.stdout.write(' - Comment in: {}'.format(
self.style.VALUE(file_to_comment),
))
else:
if self.verbosity >= 2:
self.stdout.write(' {}: {}'.format(
self.style.LABEL(setting),
self.style.SKIP('No Migration'),
))

def _display_migrate(self, setting, action, display_value):
if self.verbosity >= 1:
if action == 'No Change':
action = self.style.SKIP(action)
else:
action = self.style.OK(action)
self.stdout.write(' {}: {}'.format(
self.style.LABEL(setting),
action,
))
if self.verbosity >= 2:
for line in display_value.splitlines():
self.stdout.write(' {}'.format(
self.style.VALUE(line),
))

def _display_diff_summary(self, filename, added, removed):
self.stdout.write(' {} {}{} {}{}'.format(
self.style.LABEL(filename),
self.style.ERROR('-'),
self.style.ERROR(int(removed)),
self.style.OK('+'),
self.style.OK(str(added)),
))

def _display_comment(self, diffs):
for diff in diffs:
if self.verbosity >= 2:
for line in diff.splitlines():
display_line = line
if line.startswith('--- ') or line.startswith('+++ '):
display_line = self.style.LABEL(line)
elif line.startswith('-'):
display_line = self.style.ERROR(line)
elif line.startswith('+'):
display_line = self.style.OK(line)
elif line.startswith('@@'):
display_line = self.style.VALUE(line)
if line.startswith('--- ') or line.startswith('+++ '):
self.stdout.write(' ' + display_line)
else:
self.stdout.write(' ' + display_line)
elif self.verbosity >= 1:
filename, lines_added, lines_removed = None, 0, 0
for line in diff.splitlines():
if line.startswith('+++ '):
if filename:
self._display_diff_summary(filename, lines_added, lines_removed)
filename, lines_added, lines_removed = line[4:], 0, 0
elif line.startswith('+'):
lines_added += 1
elif line.startswith('-'):
lines_removed += 1
if filename:
self._display_diff_summary(filename, lines_added, lines_removed)

def _discover_settings(self, registered_settings):
if self.verbosity >= 1:
self.stdout.write(self.style.HEADING('Discovering settings to be migrated and commented:'))

# Determine which settings need to be commented/migrated.
to_migrate = collections.OrderedDict()
to_comment = collections.OrderedDict()
patterns = self._get_settings_file_patterns()

for name in registered_settings:
comment_error, migrate_error = None, None
files_to_comment = []
try:
files_to_comment = self._check_if_needs_comment(patterns, name)
except Exception as e:
comment_error = 'Error commenting {0}: {1!r}'.format(name, e)
if not self.skip_errors:
raise CommandError(comment_error)
if files_to_comment:
to_comment[name] = files_to_comment
migrate_value = empty
if files_to_comment:
migrate_value = self._check_if_needs_migration(name)
if migrate_value is not empty:
field = settings_registry.get_setting_field(name)
assert not field.read_only
try:
data = field.to_representation(migrate_value)
setting_value = field.run_validation(data)
db_value = field.to_representation(setting_value)
to_migrate[name] = db_value
except Exception as e:
to_comment.pop(name)
migrate_error = 'Unable to assign value {0!r} to setting "{1}: {2!s}".'.format(migrate_value, name, e)
if not self.skip_errors:
raise CommandError(migrate_error)
self._display_tbd(name, files_to_comment, migrate_value, comment_error, migrate_error)
if self.verbosity == 1 and not to_migrate and not to_comment:
self.stdout.write(' No settings found to migrate or comment!')
return (to_migrate, to_comment)

def _migrate(self, to_migrate):
if self.verbosity >= 1:
if self.dry_run:
self.stdout.write(self.style.HEADING('Migrating settings to database (dry-run):'))
else:
self.stdout.write(self.style.HEADING('Migrating settings to database:'))
if not to_migrate:
self.stdout.write(' No settings to migrate!')

# Now migrate those settings to the database.
for name, db_value in to_migrate.items():
display_value = json.dumps(db_value, indent=4)
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
action = 'No Change'
if not setting:
action = 'Migrated'
if not self.dry_run:
Setting.objects.create(key=name, user=None, value=db_value)
elif setting.value != db_value or type(setting.value) != type(db_value):
action = 'Updated'
if not self.dry_run:
setting.value = db_value
setting.save(update_fields=['value'])
self._display_migrate(name, action, display_value)

def _comment(self, to_comment):
if self.verbosity >= 1:
if bool(self.dry_run or self.no_comment):
self.stdout.write(self.style.HEADING('Commenting settings in files (dry-run):'))
else:
self.stdout.write(self.style.HEADING('Commenting settings in files:'))
if not to_comment:
self.stdout.write(' No settings to comment!')

# Now comment settings in settings files.
if to_comment:
to_comment_patterns = []
license_file_to_comment = None
local_settings_file_to_comment = None
custom_logo_file_to_comment = None
for files_to_comment in to_comment.values():
for file_to_comment in files_to_comment:
if file_to_comment == self._get_license_file():
license_file_to_comment = file_to_comment
elif file_to_comment == self._get_local_settings_file():
local_settings_file_to_comment = file_to_comment
elif file_to_comment == self._get_custom_logo_file():
custom_logo_file_to_comment = file_to_comment
elif file_to_comment not in to_comment_patterns:
to_comment_patterns.append(file_to_comment)
# Run once in dry-run mode to catch any errors from updating the files.
diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=True, backup_suffix=self.backup_suffix)
# Then, if really updating, run again.
if not self.dry_run and not self.no_comment:
diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=False, backup_suffix=self.backup_suffix)
if license_file_to_comment:
diffs.extend(self._comment_license_file(dry_run=False))
if local_settings_file_to_comment:
diffs.extend(self._comment_local_settings_file(dry_run=False))
if custom_logo_file_to_comment:
diffs.extend(self._comment_custom_logo_file(dry_run=False))
self._display_comment(diffs)

def _migrate_settings(self, registered_settings):
to_migrate, to_comment = self._discover_settings(registered_settings)

if not bool(self.comment_only):
self._migrate(to_migrate)
self._comment(to_comment)
@@ -1,7 +1,6 @@
import base64
import hashlib

import six
from django.utils.encoding import smart_str

from cryptography.hazmat.backends import default_backend
@@ -91,7 +90,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=Fals
if skip_utf8:
utf8 = False
else:
utf8 = type(value) == six.text_type
utf8 = type(value) == str
value = smart_str(value)
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
encryptor = Cipher(AES(key), ECB(), default_backend()).encryptor()

@@ -33,7 +33,7 @@ class Setting(CreatedModifiedModel):
on_delete=models.CASCADE,
))

def __unicode__(self):
def __str__(self):
try:
json_value = json.dumps(self.value)
except ValueError:

@@ -1,8 +1,6 @@
# Django REST Framework
from rest_framework import serializers

import six

# Tower
from awx.api.fields import VerbatimField
from awx.api.serializers import BaseSerializer
@@ -47,12 +45,12 @@ class SettingFieldMixin(object):
"""Mixin to use a registered setting field class for API display/validation."""

def to_representation(self, obj):
if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
if getattr(self, 'encrypted', False) and isinstance(obj, str) and obj:
return '$encrypted$'
return obj

def to_internal_value(self, value):
if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
if getattr(self, 'encrypted', False) and isinstance(value, str) and value.startswith('$encrypted$'):
raise serializers.SkipField()
obj = super(SettingFieldMixin, self).to_internal_value(value)
return super(SettingFieldMixin, self).to_representation(obj)

@@ -6,18 +6,17 @@ import re
import sys
import threading
import time
import StringIO
import traceback
import urllib

import six
import urllib.parse
from io import StringIO

# Django
from django.conf import LazySettings
from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError, OperationalError, transaction, connection
from django.db import transaction, connection
from django.db.utils import Error as DBError
from django.utils.functional import cached_property

# Django REST Framework
@@ -67,7 +66,7 @@ def normalize_broker_url(value):
match = re.search('(amqp://[^:]+:)(.*)', parts[0])
if match:
prefix, password = match.group(1), match.group(2)
parts[0] = prefix + urllib.quote(password)
parts[0] = prefix + urllib.parse.quote(password)
return '@'.join(parts)


@@ -90,21 +89,21 @@ def _ctit_db_wrapper(trans_safe=False):
logger.debug('Obtaining database settings in spite of broken transaction.')
transaction.set_rollback(False)
yield
except (ProgrammingError, OperationalError):
except DBError:
if 'migrate' in sys.argv and get_tower_migration_version() < '310':
logger.info('Using default settings until version 3.1 migration.')
else:
# We want the _full_ traceback with the context
# First we get the current call stack, which constitutes the "top",
# it has the context up to the point where the context manager is used
top_stack = StringIO.StringIO()
top_stack = StringIO()
traceback.print_stack(file=top_stack)
top_lines = top_stack.getvalue().strip('\n').split('\n')
top_stack.close()
# Get "bottom" stack from the local error that happened
# inside of the "with" block this wraps
exc_type, exc_value, exc_traceback = sys.exc_info()
bottom_stack = StringIO.StringIO()
bottom_stack = StringIO()
traceback.print_tb(exc_traceback, file=bottom_stack)
bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
# Glue together top and bottom where overlap is found
@@ -168,15 +167,6 @@ class EncryptedCacheProxy(object):
def get(self, key, **kwargs):
value = self.cache.get(key, **kwargs)
value = self._handle_encryption(self.decrypter, key, value)

# python-memcached auto-encodes unicode on cache set in python2
# https://github.com/linsomniac/python-memcached/issues/79
# https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
if six.PY2 and isinstance(value, six.binary_type):
try:
six.text_type(value)
except UnicodeDecodeError:
value = value.decode('utf-8')
logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
return value

@@ -309,7 +299,7 @@ class SettingsWrapper(UserSettingsHolder):
self.__dict__['_awx_conf_preload_expires'] = time.time() + SETTING_CACHE_TIMEOUT
# Check for any settings that have been defined in Python files and
# make those read-only to avoid overriding in the database.
if not self._awx_conf_init_readonly and 'migrate_to_database_settings' not in sys.argv:
if not self._awx_conf_init_readonly:
defaults_snapshot = self._get_default('DEFAULTS_SNAPSHOT')
for key in get_writeable_settings(self.registry):
init_default = defaults_snapshot.get(key, None)

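The `_ctit_db_wrapper` hunk above swaps `StringIO.StringIO` for `io.StringIO` while keeping the technique of gluing a "top" call stack onto the "bottom" exception traceback. A self-contained sketch of that technique on Python 3:

```python
import sys
import traceback
from io import StringIO

def full_traceback():
    # "Top": the current call stack up to this point.
    top = StringIO()
    traceback.print_stack(file=top)
    # "Bottom": the frames of the exception currently being handled.
    bottom = StringIO()
    traceback.print_tb(sys.exc_info()[2], file=bottom)
    return top.getvalue() + bottom.getvalue()

try:
    1 / 0
except ZeroDivisionError:
    print(full_traceback())
```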
@@ -9,15 +9,11 @@ from django.core.cache import cache
from django.dispatch import receiver

# Tower
import awx.main.signals
from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.serializers import SettingSerializer

logger = logging.getLogger('awx.conf.signals')

awx.main.signals.model_serializer_mapping[Setting] = SettingSerializer

__all__ = []

@@ -1,7 +1,8 @@
import urllib.parse

import pytest

from django.core.urlresolvers import resolve
from django.utils.six.moves.urllib.parse import urlparse
from django.contrib.auth.models import User

from rest_framework.test import (
@@ -33,7 +34,7 @@ def admin():
@pytest.fixture
def api_request(admin):
def rf(verb, url, data=None, user=admin):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
request = getattr(APIRequestFactory(), verb)(url, data=data, format='json')
if user:
force_authenticate(request, user=user)
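As in the conftest hunk above, Python 3 folds `urlparse` and the `urllib` quoting helpers into the single `urllib.parse` module. A quick illustration:

```python
import urllib.parse

# urlparse and urllib.quote now live together in urllib.parse:
parts = urllib.parse.urlparse('amqp://guest:p%40ss@localhost:5672/vhost')
assert parts.scheme == 'amqp'
assert urllib.parse.quote('p@ss') == 'p%40ss'
```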
@@ -1,5 +1,5 @@
import pytest
import mock
from unittest import mock

from rest_framework import serializers

@@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
import pytest
import mock

from django.apps import apps
from awx.conf.migrations._reencrypt import (
replace_aesecb_fernet,
encrypt_field,
decrypt_field,
)
from awx.conf.settings import Setting
from awx.main.utils import decrypt_field as new_decrypt_field


@pytest.mark.django_db
@pytest.mark.parametrize("old_enc, new_enc, value", [
('$encrypted$UTF8$AES', '$encrypted$UTF8$AESCBC$', u'Iñtërnâtiônàlizætiøn'),
('$encrypted$AES$', '$encrypted$AESCBC$', 'test'),
])
def test_settings(old_enc, new_enc, value):
with mock.patch('awx.conf.models.encrypt_field', encrypt_field):
with mock.patch('awx.conf.settings.decrypt_field', decrypt_field):
setting = Setting.objects.create(key='SOCIAL_AUTH_GITHUB_SECRET', value=value)
assert setting.value.startswith(old_enc)

replace_aesecb_fernet(apps, None)
setting.refresh_from_db()

assert setting.value.startswith(new_enc)
assert new_decrypt_field(setting, 'value') == value

# This is here for a side-effect.
# Exception if the encryption type of AESCBC is not properly skipped, ensures
# our `startswith` calls don't have typos
replace_aesecb_fernet(apps, None)
@@ -4,6 +4,7 @@
# All Rights Reserved.

from contextlib import contextmanager
import codecs
from uuid import uuid4
import time

@@ -12,7 +13,6 @@ from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
import pytest
import six

from awx.conf import models, fields
from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET
@@ -67,9 +67,9 @@ def test_cached_settings_unicode_is_auto_decoded(settings):
# https://github.com/linsomniac/python-memcached/issues/79
# https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961

value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8') # this simulates what python-memcached does on cache.set()
value = 'Iñtërnâtiônàlizætiøn' # this simulates what python-memcached does on cache.set()
settings.cache.set('DEBUG', value)
assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
assert settings.cache.get('DEBUG') == 'Iñtërnâtiônàlizætiøn'


def test_read_only_setting(settings):
@@ -262,7 +262,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
encrypted=encrypted
)
# this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
value = 'Iñtërnâtiônàlizætiøn'

setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)
mocks = mocker.Mock(**{
@@ -272,8 +272,8 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
}),
})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == six.u('Iñtërnâtiônàlizætiøn')
assert settings.cache.get('AWX_SOME_SETTING') == six.u('Iñtërnâtiônàlizætiøn')
assert settings.AWX_SOME_SETTING == 'Iñtërnâtiônàlizætiøn'
assert settings.cache.get('AWX_SOME_SETTING') == 'Iñtërnâtiônàlizætiøn'


@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -434,7 +434,7 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):

def rot13(obj, attribute):
assert obj.pk == 123
return getattr(obj, attribute).encode('rot13')
return codecs.encode(getattr(obj, attribute), 'rot_13')

native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy(
@@ -471,7 +471,7 @@ def test_readonly_sensitive_cache_data_is_encrypted(settings):

def rot13(obj, attribute):
assert obj.pk is None
return getattr(obj, attribute).encode('rot13')
return codecs.encode(getattr(obj, attribute), 'rot_13')

native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy(

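The test hunks above replace `str.encode('rot13')`, which Python 3 removed because `str.encode` must return bytes, with the `codecs` text-to-text API. A minimal demonstration:

```python
import codecs

# str.encode('rot13') is gone on Python 3 (str.encode must yield bytes);
# the rot_13 codec is reached through the text-to-text codecs API:
assert codecs.encode('secret', 'rot_13') == 'frperg'
assert codecs.decode('frperg', 'rot_13') == 'secret'
```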
@@ -6,8 +6,6 @@ import glob
import os
import shutil

import six

# AWX
from awx.conf.registry import settings_registry

@@ -15,7 +13,7 @@ __all__ = ['comment_assignments', 'conf_to_dict']


def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
if isinstance(patterns, six.string_types):
if isinstance(patterns, str):
patterns = [patterns]
diffs = []
for pattern in patterns:
@@ -34,7 +32,7 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=
def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
from redbaron import RedBaron, indent

if isinstance(assignment_names, six.string_types):
if isinstance(assignment_names, str):
assignment_names = [assignment_names]
else:
assignment_names = assignment_names[:]
@@ -102,7 +100,7 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
if not dry_run:
if backup_filename:
shutil.copy2(filename, backup_filename)
with open(filename, 'wb') as fileobj:
with open(filename, 'w') as fileobj:
fileobj.write(new_file_data)
return '\n'.join(diff_lines)

@@ -72,7 +72,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

def get_queryset(self):
self.category_slug = self.kwargs.get('category_slug', 'all')
all_category_slugs = settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys()
all_category_slugs = list(settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys())
for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
all_category_slugs.remove(slug_to_delete)
if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
@@ -123,7 +123,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
continue
if settings_registry.is_setting_encrypted(key) and \
isinstance(value, basestring) and \
isinstance(value, str) and \
value.startswith('$encrypted$'):
continue
setattr(serializer.instance, key, value)
@@ -135,7 +135,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
setting.value = value
setting.save(update_fields=['value'])
settings_change_list.append(key)
if settings_change_list and 'migrate_to_database_settings' not in sys.argv:
if settings_change_list:
handle_setting_changes.delay(settings_change_list)

def destroy(self, request, *args, **kwargs):
@@ -150,7 +150,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
continue
setting.delete()
settings_change_list.append(setting.key)
if settings_change_list and 'migrate_to_database_settings' not in sys.argv:
if settings_change_list:
handle_setting_changes.delay(settings_change_list)

# When TOWER_URL_BASE is deleted from the API, reset it to the hostname
@@ -210,7 +210,7 @@ class SettingLoggingTest(GenericAPIView):
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
this_module = sys.modules[__name__]
for attr, value in locals().items():
for attr, value in list(locals().items()):
if isinstance(value, type) and issubclass(value, APIView):
name = camelcase_to_underscore(attr)
view = value.as_view()

@@ -35,8 +35,6 @@ except ImportError:
os.environ['VIRTUAL_ENV']
))

from six.moves import xrange

__all__ = ['event_context']


@@ -56,9 +54,8 @@ class IsolatedFileWrite:
filename = '{}-partial.json'.format(event_uuid)
dropoff_location = os.path.join(self.private_data_dir, 'artifacts', 'job_events', filename)
write_location = '.'.join([dropoff_location, 'tmp'])
partial_data = json.dumps(value)
with os.fdopen(os.open(write_location, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
f.write(partial_data)
f.write(value)
os.rename(write_location, dropoff_location)


@@ -154,7 +151,7 @@ class EventContext(object):
if event not in ('playbook_on_stats',) and "res" in event_data and len(str(event_data['res'])) > max_res:
event_data['res'] = {}
event_dict = dict(event=event, event_data=event_data)
for key in event_data.keys():
for key in list(event_data.keys()):
if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
event_dict[key] = event_data.pop(key)
elif key in ('verbosity', 'pid'):
@@ -165,11 +162,11 @@ class EventContext(object):
return {}

def dump(self, fileobj, data, max_width=78, flush=False):
b64data = base64.b64encode(json.dumps(data))
b64data = base64.b64encode(json.dumps(data).encode('utf-8')).decode()
with self.display_lock:
# pattern corresponding to OutputEventFilter expectation
fileobj.write(u'\x1b[K')
for offset in xrange(0, len(b64data), max_width):
for offset in range(0, len(b64data), max_width):
chunk = b64data[offset:offset + max_width]
escaped_chunk = u'{}\x1b[{}D'.format(chunk, len(chunk))
fileobj.write(escaped_chunk)
@@ -179,7 +176,7 @@ class EventContext(object):

def dump_begin(self, fileobj):
begin_dict = self.get_begin_dict()
self.cache.set(":1:ev-{}".format(begin_dict['uuid']), begin_dict)
self.cache.set(":1:ev-{}".format(begin_dict['uuid']), json.dumps(begin_dict))
self.dump(fileobj, {'uuid': begin_dict['uuid']})

def dump_end(self, fileobj):

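The `dump()` hunk above reflects that `base64.b64encode` consumes and returns bytes on Python 3, so the JSON text is encoded to UTF-8 first and the result decoded back to `str`. A round-trip sketch:

```python
import base64
import json

data = {'uuid': 'abc123'}
# b64encode takes bytes and returns bytes on Python 3, so the JSON text
# is encoded to UTF-8 first and the result decoded back to str:
b64data = base64.b64encode(json.dumps(data).encode('utf-8')).decode()
assert json.loads(base64.b64decode(b64data)) == data
```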
@@ -5,11 +5,11 @@ from __future__ import absolute_import

from collections import OrderedDict
import json
import mock
import os
import shutil
import sys
import tempfile
from unittest import mock

import pytest

@@ -2020,7 +2020,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/conf.py:286
|
||||
msgid ""
|
||||
"Allows roles to be dynamically downlaoded from a requirements.yml file for "
|
||||
"Allows roles to be dynamically downloaded from a requirements.yml file for "
|
||||
"SCM projects."
|
||||
msgstr ""
|
||||
|
||||
@@ -2835,7 +2835,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:979
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -2020,7 +2020,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/conf.py:286
|
||||
msgid ""
|
||||
"Allows roles to be dynamically downlaoded from a requirements.yml file for "
|
||||
"Allows roles to be dynamically downloaded from a requirements.yml file for "
|
||||
"SCM projects."
|
||||
msgstr ""
|
||||
|
||||
@@ -2835,7 +2835,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:979
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -3086,7 +3086,7 @@ msgstr "URL CloudForms"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Introduzca la URL para la máquina virtual que corresponda a su instancia "
|
||||
|
||||
@@ -3099,7 +3099,7 @@ msgstr "URL CloudForms"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Veuillez saisir l’URL de la machine virtuelle qui correspond à votre "
|
||||
|
||||
@@ -2858,7 +2858,7 @@ msgstr "CloudForms URL"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"CloudForms インスタンスに対応する仮想マシンの URL を入力します (例: https://cloudforms.example.org)。"
|
||||
|
||||
@@ -3072,7 +3072,7 @@ msgstr "CloudForms-URL"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Voer de URL in voor de virtuele machine die overeenkomt met uw CloudForm-"
|
||||
|
||||
@@ -5,7 +5,6 @@
import os
import sys
import logging
import six
from functools import reduce

# Django
@@ -1397,6 +1396,8 @@ class JobTemplateAccess(BaseAccess):
]

for k, v in data.items():
if k not in [x.name for x in obj._meta.concrete_fields]:
continue
if hasattr(obj, k) and getattr(obj, k) != v:
if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''): # Equate '' to None in the case of foreign keys
@@ -2588,7 +2589,7 @@ class RoleAccess(BaseAccess):
if (isinstance(obj.content_object, Organization) and
obj.role_field in (Organization.member_role.field.parent_role + ['member_role'])):
if not isinstance(sub_obj, User):
logger.error(six.text_type('Unexpected attempt to associate {} with organization role.').format(sub_obj))
logger.error('Unexpected attempt to associate {} with organization role.'.format(sub_obj))
return False
if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
return False

@@ -197,6 +197,18 @@ register(
category_slug='jobs',
)

register(
'AWX_ISOLATED_VERBOSITY',
field_class=fields.IntegerField,
min_value=0,
max_value=5,
label=_('Verbosity level for isolated node management tasks'),
help_text=_('This can be raised to aid in debugging connection issues for isolated task execution'),
category=_('Jobs'),
category_slug='jobs',
default=0
)

register(
'AWX_ISOLATED_CHECK_INTERVAL',
field_class=fields.IntegerField,
@@ -283,7 +295,7 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Enable Role Download'),
help_text=_('Allows roles to be dynamically downlaoded from a requirements.yml file for SCM projects.'),
help_text=_('Allows roles to be dynamically downloaded from a requirements.yml file for SCM projects.'),
category=_('Jobs'),
category_slug='jobs',
)

@@ -4,6 +4,7 @@ import logging
from channels import Group
from channels.auth import channel_session_user_from_http, channel_session_user

from django.utils.encoding import smart_str
from django.http.cookie import parse_cookie
from django.core.serializers.json import DjangoJSONEncoder

@@ -30,7 +31,7 @@ def ws_connect(message):
# store the valid CSRF token from the cookie so we can compare it later
# on ws_receive
cookie_token = parse_cookie(
headers.get('cookie')
smart_str(headers.get(b'cookie'))
).get('csrftoken')
if cookie_token:
message.channel_session[XRF_KEY] = cookie_token

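In the channels hunk above, header keys arrive as bytes, so the cookie value is decoded with `smart_str` before parsing. A sketch of the same lookup, assuming Django is installed (the header dict here is illustrative):

```python
from django.http.cookie import parse_cookie
from django.utils.encoding import smart_str

# Header maps under channels key and value as bytes; decoding first
# keeps parse_cookie working on Python 3 (header dict is illustrative):
headers = {b'cookie': b'csrftoken=abc123; sessionid=xyz'}
cookie_token = parse_cookie(smart_str(headers.get(b'cookie'))).get('csrftoken')
assert cookie_token == 'abc123'
```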
@@ -2,4 +2,4 @@ from django.conf import settings


def get_local_queuename():
return settings.CLUSTER_HOST_ID.encode('utf-8')
return settings.CLUSTER_HOST_ID

@@ -1,6 +1,5 @@
import logging
import os
import sys
import random
import traceback
from uuid import uuid4
@@ -8,7 +7,7 @@ from uuid import uuid4
import collections
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Full as QueueFull, Empty as QueueEmpty
from queue import Full as QueueFull, Empty as QueueEmpty

from django.conf import settings
from django.db import connection as django_connection, connections
@@ -129,7 +128,7 @@ class PoolWorker(object):
# the task at [0] is the one that's running right now (or is about to
# be running)
if len(self.managed_tasks):
return self.managed_tasks[self.managed_tasks.keys()[0]]
return self.managed_tasks[list(self.managed_tasks.keys())[0]]

return None

@@ -180,7 +179,7 @@ class WorkerPool(object):
class MessagePrinter(awx.main.dispatch.worker.BaseWorker):

def perform_work(self, body):
print body
print(body)

pool = WorkerPool(min_workers=4) # spawn four worker processes
pool.init_workers(MessagePrint().work_loop)
@@ -253,7 +252,7 @@ class WorkerPool(object):
return tmpl.render(pool=self, workers=self.workers, meta=self.debug_meta)

def write(self, preferred_queue, body):
queue_order = sorted(range(len(self.workers)), cmp=lambda x, y: -1 if x==preferred_queue else 0)
queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x==preferred_queue else x)
write_attempt_order = []
for queue_actual in queue_order:
try:
@@ -325,6 +324,11 @@ class AutoscalePool(WorkerPool):
2. Clean up unnecessary, idle workers.
3. Check to see if the database says this node is running any tasks
that aren't actually running. If so, reap them.

IMPORTANT: this function is one of the few places in the dispatcher
(aside from setting lookups) where we talk to the database. As such,
if there's an outage, this method _can_ throw various
django.db.utils.Error exceptions. Act accordingly.
"""
orphaned = []
for w in self.workers[::]:
@@ -365,14 +369,8 @@ class AutoscalePool(WorkerPool):
running_uuids = []
for worker in self.workers:
worker.calculate_managed_tasks()
running_uuids.extend(worker.managed_tasks.keys())
try:
reaper.reap(excluded_uuids=running_uuids)
except Exception:
# we _probably_ failed here due to DB connectivity issues, so
# don't use our logger (it accesses the database for configuration)
_, _, tb = sys.exc_info()
traceback.print_tb(tb)
running_uuids.extend(list(worker.managed_tasks.keys()))
reaper.reap(excluded_uuids=running_uuids)

def up(self):
if self.full:

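The `write()` hunk above ports `sorted(..., cmp=...)`, removed in Python 3, to a key function that maps the preferred queue index below every other value. A standalone sketch of that ordering:

```python
# sorted()'s cmp= argument was removed in Python 3; a key that maps the
# preferred index below every other value reproduces the old ordering:
def queue_order(n_workers, preferred):
    return sorted(range(n_workers), key=lambda x: -1 if x == preferred else x)

assert queue_order(4, preferred=2) == [2, 0, 1, 3]
```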
@@ -45,7 +45,7 @@ class task:

@task(queue='tower_broadcast', exchange_type='fanout')
def announce():
print "Run this everywhere!"
print("Run this everywhere!")
"""

def __init__(self, queue=None, exchange_type=None):

@@ -5,7 +5,7 @@ import os
import logging
import signal
from uuid import UUID
from Queue import Empty as QueueEmpty
from queue import Empty as QueueEmpty

from django import db
from kombu import Producer
@@ -81,7 +81,11 @@ class AWXConsumer(ConsumerMixin):

def process_task(self, body, message):
if 'control' in body:
return self.control(body, message)
try:
return self.control(body, message)
except Exception:
logger.exception("Exception handling control message:")
return
if len(self.pool):
if "uuid" in body and body['uuid']:
try:

@@ -4,7 +4,6 @@ import importlib
import sys
import traceback

import six

from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown

@@ -30,11 +29,18 @@ class TaskWorker(BaseWorker):
awx.main.tasks.delete_inventory
awx.main.tasks.RunProjectUpdate
'''
if not task.startswith('awx.'):
raise ValueError('{} is not a valid awx task'.format(task))
module, target = task.rsplit('.', 1)
module = importlib.import_module(module)
_call = None
if hasattr(module, target):
_call = getattr(module, target, None)
if not (
hasattr(_call, 'apply_async') and hasattr(_call, 'delay')
):
raise ValueError('{} is not decorated with @task()'.format(task))

return _call

def run_callable(self, body):
@@ -78,11 +84,12 @@ class TaskWorker(BaseWorker):
try:
result = self.run_callable(body)
except Exception as exc:
result = exc

try:
if getattr(exc, 'is_awx_task_error', False):
# Error caused by user / tracked in job output
logger.warning(six.text_type("{}").format(exc))
logger.warning("{}".format(exc))
else:
task = body['task']
args = body.get('args', [])

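The `resolve_callable` hunk above whitelists the `awx.` namespace before importing the dotted path. A generic sketch of the same guard-then-import pattern, using `json.` as a stand-in namespace so it runs anywhere:

```python
import importlib

ALLOWED_PREFIX = 'json.'  # stand-in namespace for this sketch

def resolve_callable(task):
    # Reject anything outside the allowed namespace before importing,
    # so arbitrary dotted paths can't be turned into code execution:
    if not task.startswith(ALLOWED_PREFIX):
        raise ValueError('{} is not a valid task'.format(task))
    module, target = task.rsplit('.', 1)
    return getattr(importlib.import_module(module), target, None)

assert resolve_callable('json.dumps')({'a': 1}) == '{"a": 1}'
```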
@@ -1,13 +1,12 @@
# Copyright (c) 2018 Ansible by Red Hat
# All Rights Reserved.

import six


class _AwxTaskError():
def build_exception(self, task, message=None):
if message is None:
message = six.text_type("Execution error running {}").format(task.log_format)
message = "Execution error running {}".format(task.log_format)
e = Exception(message)
e.task = task
e.is_awx_task_error = True
@@ -15,7 +14,7 @@ class _AwxTaskError():

def TaskCancel(self, task, rc):
"""Canceled flag caused run_pexpect to kill the job run"""
message=six.text_type("{} was canceled (rc={})").format(task.log_format, rc)
message="{} was canceled (rc={})".format(task.log_format, rc)
e = self.build_exception(task, message)
e.rc = rc
e.awx_task_error_type = "TaskCancel"
@@ -23,7 +22,7 @@ class _AwxTaskError():

def TaskError(self, task, rc):
"""Userspace error (non-zero exit code) in run_pexpect subprocess"""
message = six.text_type("{} encountered an error (rc={}), please see task stdout for details.").format(task.log_format, rc)
message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
e = self.build_exception(task, message)
e.rc = rc
e.awx_task_error_type = "TaskError"

@@ -1,6 +1,5 @@
import base64
import codecs
import StringIO
import json
import os
import shutil
@@ -9,8 +8,10 @@ import tempfile
import time
import logging
from distutils.version import LooseVersion as Version
from io import StringIO

from django.conf import settings
from django.utils.encoding import smart_bytes, smart_str

import awx
from awx.main.expect import run
@@ -101,6 +102,8 @@ class IsolatedManager(object):
]
if extra_vars:
args.extend(['-e', json.dumps(extra_vars)])
if settings.AWX_ISOLATED_VERBOSITY:
args.append('-%s' % ('v' * min(5, settings.AWX_ISOLATED_VERBOSITY)))
return args

@staticmethod
@@ -142,7 +145,7 @@ class IsolatedManager(object):

# if an ssh private key fifo exists, read its contents and delete it
if self.ssh_key_path:
buff = StringIO.StringIO()
buff = StringIO()
with open(self.ssh_key_path, 'r') as fifo:
for line in fifo:
buff.write(line)
@@ -154,7 +157,10 @@ class IsolatedManager(object):
# into a variable, and will replicate the data into a named pipe on the
# isolated instance
secrets_path = os.path.join(self.private_data_dir, 'env')
run.open_fifo_write(secrets_path, base64.b64encode(json.dumps(secrets)))
run.open_fifo_write(
secrets_path,
smart_str(base64.b64encode(smart_bytes(json.dumps(secrets))))
)

self.build_isolated_job_data()

@@ -174,7 +180,7 @@ class IsolatedManager(object):
args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars)
if self.instance.verbosity:
args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
buff = StringIO.StringIO()
buff = StringIO()
logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
status, rc = IsolatedManager.run_pexpect(
args, self.awx_playbook_path(), self.management_env, buff,
@@ -244,7 +250,7 @@ class IsolatedManager(object):
os.makedirs(self.path_to('artifacts', 'job_events'), mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)

def _missing_artifacts(self, path_list, output):
missing_artifacts = filter(lambda path: not os.path.exists(path), path_list)
missing_artifacts = list(filter(lambda path: not os.path.exists(path), path_list))
for path in missing_artifacts:
self.stdout_handle.write('ansible did not exit cleanly, missing `{}`.\n'.format(path))
if missing_artifacts:
@@ -282,7 +288,7 @@ class IsolatedManager(object):
status = 'failed'
output = ''
rc = None
buff = StringIO.StringIO()
buff = StringIO()
last_check = time.time()
seek = 0
job_timeout = remaining = self.job_timeout
@@ -303,7 +309,7 @@ class IsolatedManager(object):
time.sleep(1)
continue

buff = StringIO.StringIO()
buff = StringIO()
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
status, rc = IsolatedManager.run_pexpect(
args, self.awx_playbook_path(), self.management_env, buff,
@@ -318,7 +324,7 @@ class IsolatedManager(object):

path = self.path_to('artifacts', 'stdout')
if os.path.exists(path):
with open(path, 'r') as f:
with codecs.open(path, 'r', encoding='utf-8') as f:
f.seek(seek)
for line in f:
self.stdout_handle.write(line)
@@ -340,7 +346,7 @@ class IsolatedManager(object):
elif status == 'failed':
# if we were unable to retrieve job reults from the isolated host,
# print stdout of the `check_isolated.yml` playbook for clues
self.stdout_handle.write(output)
self.stdout_handle.write(smart_str(output))

return status, rc

@@ -355,7 +361,7 @@ class IsolatedManager(object):
}
args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
buff = StringIO.StringIO()
buff = StringIO()
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
status, rc = IsolatedManager.run_pexpect(
args, self.awx_playbook_path(), self.management_env, buff,
@@ -407,46 +413,52 @@ class IsolatedManager(object):
args = cls._build_args('heartbeat_isolated.yml', hostname_string)
args.extend(['--forks', str(len(instance_qs))])
env = cls._base_management_env()
env['ANSIBLE_STDOUT_CALLBACK'] = 'json'

buff = StringIO.StringIO()
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
status, rc = IsolatedManager.run_pexpect(
args, cls.awx_playbook_path(), env, buff,
idle_timeout=timeout, job_timeout=timeout,
pexpect_timeout=5
)
output = buff.getvalue().encode('utf-8')
buff.close()

try:
result = json.loads(output)
if not isinstance(result, dict):
raise TypeError('Expected a dict but received {}.'.format(str(type(result))))
except (ValueError, AssertionError, TypeError):
logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
return
facts_path = tempfile.mkdtemp()
env['ANSIBLE_CACHE_PLUGIN'] = 'jsonfile'
env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = facts_path

for instance in instance_qs:
try:
task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
except (KeyError, IndexError):
buff = StringIO()
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
status, rc = IsolatedManager.run_pexpect(
args, cls.awx_playbook_path(), env, buff,
idle_timeout=timeout, job_timeout=timeout,
pexpect_timeout=5
)
heartbeat_stdout = buff.getvalue().encode('utf-8')
buff.close()

for instance in instance_qs:
output = heartbeat_stdout
task_result = {}
if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
cls.update_capacity(instance, task_result, awx_application_version)
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
instance.hostname))
else:
logger.warning('Could not update status of isolated instance {}, msg={}'.format(
instance.hostname, task_result.get('msg', 'unknown failure')
))
if instance.is_lost(isolated=True):
instance.capacity = 0
instance.save(update_fields=['capacity'])
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
instance.hostname, instance.modified))
try:
with open(os.path.join(facts_path, instance.hostname), 'r') as facts_data:
output = facts_data.read()
task_result = json.loads(output)
except Exception:
logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
task_result = {
'capacity_cpu': task_result['awx_capacity_cpu'],
'capacity_mem': task_result['awx_capacity_mem'],
'version': task_result['awx_capacity_version']
}
cls.update_capacity(instance, task_result, awx_application_version)
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
instance.hostname))
else:
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
||||
if instance.is_lost(isolated=True):
|
||||
instance.capacity = 0
|
||||
instance.save(update_fields=['capacity'])
|
||||
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
|
||||
instance.hostname, instance.modified))
|
||||
finally:
|
||||
if os.path.exists(facts_path):
|
||||
shutil.rmtree(facts_path)
|
||||
|
||||
@staticmethod
|
||||
def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
|
||||
|
||||
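For context on the heartbeat hunk above: instead of parsing the playbook's JSON stdout, the new code points Ansible's `jsonfile` fact cache plugin at a temp directory, so each isolated host leaves its facts in a file named after the host. A minimal sketch of the consuming side; the directory layout and the `awx_capacity_*` keys come from the diff, the function and its signature are illustrative:

```python
import json
import os

def read_capacity_facts(facts_path, hostnames):
    """Read per-host fact cache files written with ANSIBLE_CACHE_PLUGIN=jsonfile.

    Returns {hostname: {'capacity_cpu': ..., 'capacity_mem': ..., 'version': ...}}
    for every host whose cache file could be read and parsed; hosts that never
    checked in (missing or corrupt files) are simply omitted.
    """
    results = {}
    for hostname in hostnames:
        cache_file = os.path.join(facts_path, hostname)
        try:
            with open(cache_file, 'r') as f:
                facts = json.load(f)
        except (IOError, OSError, ValueError):
            continue  # no cache file, or unparseable contents
        if 'awx_capacity_cpu' in facts and 'awx_capacity_mem' in facts:
            results[hostname] = {
                'capacity_cpu': facts['awx_capacity_cpu'],
                'capacity_mem': facts['awx_capacity_mem'],
                'version': facts.get('awx_capacity_version'),
            }
    return results
```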
@@ -4,7 +4,6 @@ import argparse
import base64
import codecs
import collections
import StringIO
import logging
import json
import os
@@ -13,12 +12,15 @@ import pipes
import re
import signal
import sys
import thread
import threading
import time
try:
from io import StringIO
except ImportError:
from StringIO import StringIO

import pexpect
import psutil
import six


logger = logging.getLogger('awx.main.utils.expect')
@@ -49,7 +51,10 @@ def open_fifo_write(path, data):
reads data from the pipe.
'''
os.mkfifo(path, 0o600)
thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
threading.Thread(
target=lambda p, d: open(p, 'w').write(d),
args=(path, data)
).start()


def run_pexpect(args, cwd, env, logfile,
@@ -97,14 +102,8 @@ def run_pexpect(args, cwd, env, logfile,
# enforce usage of an OrderedDict so that the ordering of elements in
# `keys()` matches `values()`.
expect_passwords = collections.OrderedDict(expect_passwords)
password_patterns = expect_passwords.keys()
password_values = expect_passwords.values()

# pexpect needs all env vars to be utf-8 encoded strings
# https://github.com/pexpect/pexpect/issues/512
for k, v in env.items():
if isinstance(v, six.text_type):
env[k] = v.encode('utf-8')
password_patterns = list(expect_passwords.keys())
password_values = list(expect_passwords.values())

child = pexpect.spawn(
args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
@@ -232,7 +231,11 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):
instance's cancel_flag.
'''
try:
if proot_cmd in ' '.join(args):
if sys.version_info > (3, 0):
used_proot = proot_cmd.encode('utf-8') in args
else:
used_proot = proot_cmd in ' '.join(args)
if used_proot:
if not psutil:
os.kill(pid, signal.SIGKILL)
else:
@@ -253,8 +256,8 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):


def __run__(private_data_dir):
buff = StringIO.StringIO()
with open(os.path.join(private_data_dir, 'env'), 'r') as f:
buff = StringIO()
with codecs.open(os.path.join(private_data_dir, 'env'), 'r', encoding='utf-8') as f:
for line in f:
buff.write(line)


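A note on the `open_fifo_write` change above: opening a FIFO for writing blocks until a reader attaches, which is why the write must happen off the calling thread; Python 3 drops the `thread` module, hence the move to `threading.Thread`. The same pattern in isolation, a sketch rather than the module's exact code (it closes the file handle and uses a daemon thread, two small variations on the diff's lambda):

```python
import os
import threading

def open_fifo_write(path, data):
    """Write `data` to a named pipe without blocking the caller."""
    os.mkfifo(path, 0o600)  # pipe readable/writable by owner only

    def _writer(p, d):
        # open() on a FIFO blocks here until the reading side connects
        with open(p, 'w') as fifo:
            fifo.write(d)

    threading.Thread(target=_writer, args=(path, data), daemon=True).start()
```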
@@ -4,10 +4,8 @@
# Python
import copy
import json
import operator
import re
import six
import urllib
import urllib.parse

from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError, TemplateSyntaxError
@@ -46,7 +44,7 @@ from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS, ENV_BLACKLIST
from awx.main.constants import ENV_BLACKLIST
from awx.main import utils


@@ -80,7 +78,7 @@ class JSONField(upstream_JSONField):

class JSONBField(upstream_JSONBField):
def get_prep_lookup(self, lookup_type, value):
if isinstance(value, six.string_types) and value == "null":
if isinstance(value, str) and value == "null":
return 'null'
return super(JSONBField, self).get_prep_lookup(lookup_type, value)

@@ -95,7 +93,7 @@ class JSONBField(upstream_JSONBField):
def from_db_value(self, value, expression, connection, context):
# Work around a bug in django-jsonfield
# https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
if isinstance(value, six.string_types):
if isinstance(value, str):
return json.loads(value)
return value

@@ -251,6 +249,9 @@ class ImplicitRoleField(models.ForeignKey):
if type(field_name) == tuple:
continue

if type(field_name) == bytes:
field_name = field_name.decode('utf-8')

if field_name.startswith('singleton:'):
continue

@@ -373,7 +374,7 @@ class SmartFilterField(models.TextField):
# https://docs.python.org/2/library/stdtypes.html#truth-value-testing
if not value:
return None
value = urllib.unquote(value)
value = urllib.parse.unquote(value)
try:
SmartFilter().query_from_string(value)
except RuntimeError as e:
@@ -407,11 +408,8 @@ class JSONSchemaField(JSONBField):
self.schema(model_instance),
format_checker=self.format_checker
).iter_errors(value):
# strip Python unicode markers from jsonschema validation errors
error.message = re.sub(r'\bu(\'|")', r'\1', error.message)

if error.validator == 'pattern' and 'error' in error.schema:
error.message = six.text_type(error.schema['error']).format(instance=error.instance)
error.message = error.schema['error'].format(instance=error.instance)
elif error.validator == 'type':
expected_type = error.validator_value
if expected_type == 'object':
@@ -450,7 +448,7 @@ class JSONSchemaField(JSONBField):
def from_db_value(self, value, expression, connection, context):
# Work around a bug in django-jsonfield
# https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
if isinstance(value, six.string_types):
if isinstance(value, str):
return json.loads(value)
return value

@@ -512,12 +510,9 @@ class CredentialInputField(JSONSchemaField):
properties = {}
for field in model_instance.credential_type.inputs.get('fields', []):
field = field.copy()
if field['type'] == 'become_method':
field.pop('type')
field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
properties[field['id']] = field
if field.get('choices', []):
field['enum'] = field['choices'][:]
field['enum'] = list(field['choices'])[:]
return {
'type': 'object',
'properties': properties,
@@ -547,7 +542,7 @@ class CredentialInputField(JSONSchemaField):
v != '$encrypted$',
model_instance.pk
]):
if not isinstance(getattr(model_instance, k), six.string_types):
if not isinstance(getattr(model_instance, k), str):
raise django_exceptions.ValidationError(
_('secret values must be of type string, not {}').format(type(v).__name__),
code='invalid',
@@ -564,7 +559,7 @@ class CredentialInputField(JSONSchemaField):
format_checker=self.format_checker
).iter_errors(decrypted_values):
if error.validator == 'pattern' and 'error' in error.schema:
error.message = six.text_type(error.schema['error']).format(instance=error.instance)
error.message = error.schema['error'].format(instance=error.instance)
if error.validator == 'dependencies':
# replace the default error messaging w/ a better i18n string
# I wish there was a better way to determine the parameters of
@@ -658,7 +653,7 @@ class CredentialTypeInputField(JSONSchemaField):
'items': {
'type': 'object',
'properties': {
'type': {'enum': ['string', 'boolean', 'become_method']},
'type': {'enum': ['string', 'boolean']},
'format': {'enum': ['ssh_private_key']},
'choices': {
'type': 'array',
@@ -719,17 +714,6 @@ class CredentialTypeInputField(JSONSchemaField):
# If no type is specified, default to string
field['type'] = 'string'

if field['type'] == 'become_method':
if not model_instance.managed_by_tower:
raise django_exceptions.ValidationError(
_('become_method is a reserved type name'),
code='invalid',
params={'value': value},
)
else:
field.pop('type')
field['choices'] = CHOICES_PRIVILEGE_ESCALATION_METHODS

for key in ('choices', 'multiline', 'format', 'secret',):
if key in field and field['type'] != 'string':
raise django_exceptions.ValidationError(
@@ -824,14 +808,14 @@ class CredentialTypeInjectorField(JSONSchemaField):
)

class ExplodingNamespace:
def __unicode__(self):
def __str__(self):
raise UndefinedError(_('Must define unnamed file injector in order to reference `tower.filename`.'))

class TowerNamespace:
def __init__(self):
self.filename = ExplodingNamespace()

def __unicode__(self):
def __str__(self):
raise UndefinedError(_('Cannot directly reference reserved `tower` namespace container.'))

valid_namespace['tower'] = TowerNamespace()

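Most of the fields.py hunks above are the same mechanical substitution: on Python 3, `six.string_types` is just `(str,)` and `six.text_type` is `str`, so once Python 2 support is dropped the `six` indirection disappears. A before/after sketch of the pattern, using a simplified stand-in for the field method rather than the real Django signature:

```python
import json

# Python 2 + six (before):
#   if isinstance(value, six.string_types):
#       return json.loads(value)

# Python 3 only (after):
def from_db_value(value):
    """Decode a JSON column that some database drivers hand back as a raw string."""
    if isinstance(value, str):
        return json.loads(value)
    return value
```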
@@ -5,7 +5,6 @@
import datetime
import logging

import six

# Django
from django.core.management.base import BaseCommand
@@ -43,7 +42,7 @@ class Command(BaseCommand):
n_deleted_items = 0
pks_to_delete = set()
for asobj in ActivityStream.objects.iterator():
asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
asobj_disp = '"%s" id: %s' % (str(asobj), asobj.id)
if asobj.timestamp >= self.cutoff:
if self.dry_run:
self.logger.info("would skip %s" % asobj_disp)

@@ -5,7 +5,6 @@
import datetime
import logging

import six

# Django
from django.core.management.base import BaseCommand, CommandError
@@ -68,7 +67,7 @@ class Command(BaseCommand):
jobs = Job.objects.filter(created__lt=self.cutoff)
for job in jobs.iterator():
job_display = '"%s" (%d host summaries, %d events)' % \
(six.text_type(job),
(str(job),
job.job_host_summaries.count(), job.job_events.count())
if job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -89,7 +88,7 @@ class Command(BaseCommand):
ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
for ad_hoc_command in ad_hoc_commands.iterator():
ad_hoc_command_display = '"%s" (%d events)' % \
(six.text_type(ad_hoc_command),
(str(ad_hoc_command),
ad_hoc_command.ad_hoc_command_events.count())
if ad_hoc_command.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -109,7 +108,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
for pu in project_updates.iterator():
pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
pu_display = '"%s" (type %s)' % (str(pu), str(pu.launch_type))
if pu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -132,7 +131,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
for iu in inventory_updates.iterator():
iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
iu_display = '"%s" (source %s)' % (str(iu), str(iu.source))
if iu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -155,7 +154,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
for sj in system_jobs.iterator():
sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
sj_display = '"%s" (type %s)' % (str(sj), str(sj.job_type))
if sj.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -185,7 +184,7 @@ class Command(BaseCommand):
workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
for workflow_job in workflow_jobs.iterator():
workflow_job_display = '"{}" ({} nodes)'.format(
six.text_type(workflow_job),
str(workflow_job),
workflow_job.workflow_nodes.count())
if workflow_job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -206,7 +205,7 @@ class Command(BaseCommand):
notifications = Notification.objects.filter(created__lt=self.cutoff)
for notification in notifications.iterator():
notification_display = '"{}" (started {}, {} type, {} sent)'.format(
six.text_type(notification), six.text_type(notification.created),
str(notification), str(notification.created),
notification.notification_type, notification.notifications_sent)
if notification.status in ('pending',):
action_text = 'would skip' if self.dry_run else 'skipping'

@@ -1,6 +1,7 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import datetime
from django.utils.encoding import smart_str

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -35,10 +36,10 @@ class Command(BaseCommand):
).save()
pemfile = Setting.objects.create(
key='AWX_ISOLATED_PUBLIC_KEY',
value=key.public_key().public_bytes(
value=smart_str(key.public_key().public_bytes(
encoding=serialization.Encoding.OpenSSH,
format=serialization.PublicFormat.OpenSSH
) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
)) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
)
pemfile.save()
print(pemfile.value)

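The `smart_str` wrapper above exists because `public_bytes()` returns `bytes` on Python 3, and concatenating the `" generated-by-awx@..."` suffix (a `str`) onto it would raise `TypeError`. A small standalone illustration using the same `cryptography` calls visible in the diff; the key size and timestamp are made up:

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
public = key.public_key().public_bytes(
    encoding=serialization.Encoding.OpenSSH,
    format=serialization.PublicFormat.OpenSSH,
)  # -> bytes, e.g. b'ssh-rsa AAAA...'
comment = " generated-by-awx@2018-01-01T00:00:00"

# public + comment  -> TypeError: can't concat str to bytes on Python 3
line = public.decode('utf-8') + comment  # effectively what smart_str() does here
```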
@@ -4,6 +4,7 @@
# Python
import json
import logging
import fnmatch
import os
import re
import subprocess
@@ -15,12 +16,20 @@ import shutil
# Django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, transaction
from django.utils.encoding import smart_text

# AWX
from awx.main.models import * # noqa
# AWX inventory imports
from awx.main.models.inventory import (
Inventory,
InventorySource,
InventoryUpdate,
Host
)
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data

# other AWX imports
from awx.main.models.rbac import batch_role_ancestor_rebuilding
from awx.main.utils import (
ignore_inventory_computed_fields,
check_proot_installed,
@@ -28,7 +37,6 @@ from awx.main.utils import (
build_proot_temp_dir,
get_licenser
)
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
from awx.main.signals import disable_activity_stream
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV

@@ -63,60 +71,62 @@ class AnsibleInventoryLoader(object):
use the ansible-inventory CLI utility to convert it into in-memory
representational objects. Example:
/usr/bin/ansible/ansible-inventory -i hosts --list
If it fails to find this, it uses the backported script instead
'''

def __init__(self, source, group_filter_re=None, host_filter_re=None, is_custom=False):
def __init__(self, source, is_custom=False, venv_path=None):
self.source = source
self.source_dir = functioning_dir(self.source)
self.is_custom = is_custom
self.tmp_private_dir = None
self.method = 'ansible-inventory'
self.group_filter_re = group_filter_re
self.host_filter_re = host_filter_re

self.is_vendored_source = False
if self.source_dir == os.path.join(settings.BASE_DIR, 'plugins', 'inventory'):
self.is_vendored_source = True
if venv_path:
self.venv_path = venv_path
else:
self.venv_path = settings.ANSIBLE_VENV_PATH

def build_env(self):
env = dict(os.environ.items())
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
env['VIRTUAL_ENV'] = self.venv_path
env['PATH'] = os.path.join(self.venv_path, "bin") + ":" + env['PATH']
# Set configuration items that should always be used for updates
for key, value in STANDARD_INVENTORY_UPDATE_ENV.items():
if key not in env:
env[key] = value
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
venv_libdir = os.path.join(self.venv_path, "lib")
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
if os.path.isdir(os.path.join(venv_libdir, "python2.7")):
env['PYTHONPATH'] = os.path.join(venv_libdir, "python2.7", "site-packages") + ":"
for version in os.listdir(venv_libdir):
if fnmatch.fnmatch(version, 'python[23].*'):
if os.path.isdir(os.path.join(venv_libdir, version)):
env['PYTHONPATH'] = os.path.join(venv_libdir, version, "site-packages") + ":"
break
# For internal inventory updates, these are not reported in the job_env API
logger.info('Using VIRTUAL_ENV: {}'.format(env['VIRTUAL_ENV']))
logger.info('Using PATH: {}'.format(env['PATH']))
logger.info('Using PYTHONPATH: {}'.format(env.get('PYTHONPATH', None)))
return env

def get_path_to_ansible_inventory(self):
venv_exe = os.path.join(self.venv_path, 'bin', 'ansible-inventory')
if os.path.exists(venv_exe):
return venv_exe
elif os.path.exists(
os.path.join(self.venv_path, 'bin', 'ansible')
):
# if bin/ansible exists but bin/ansible-inventory doesn't, it's
# probably a really old version of ansible that doesn't support
# ansible-inventory
raise RuntimeError(
"{} does not exist (please upgrade to ansible >= 2.4)".format(
venv_exe
)
)
return shutil.which('ansible-inventory')

def get_base_args(self):
# get ansible-inventory absolute path for running in bubblewrap/proot, in Popen
for path in os.environ["PATH"].split(os.pathsep):
potential_path = os.path.join(path.strip('"'), 'ansible-inventory')
if os.path.isfile(potential_path) and os.access(potential_path, os.X_OK):
logger.debug('Using system install of ansible-inventory CLI: {}'.format(potential_path))
return [potential_path, '-i', self.source]

# Stopgap solution for group_vars, do not use backported module for official
# vendored cloud modules or custom scripts TODO: remove after Ansible 2.3 deprecation
if self.is_vendored_source or self.is_custom:
self.method = 'inventory script invocation'
return [self.source]

# ansible-inventory was not found, look for backported module TODO: remove after Ansible 2.3 deprecation
abs_module_path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'plugins',
'ansible_inventory', 'backport.py'))
self.method = 'ansible-inventory backport'

if not os.path.exists(abs_module_path):
raise ImproperlyConfigured('Cannot find inventory module')
logger.debug('Using backported ansible-inventory module: {}'.format(abs_module_path))
return [abs_module_path, '-i', self.source]
bargs = [self.get_path_to_ansible_inventory(), '-i', self.source]
logger.debug('Using base command: {}'.format(' '.join(bargs)))
return bargs

def get_proot_args(self, cmd, env):
cwd = os.getcwd()
@@ -142,6 +152,9 @@ class AnsibleInventoryLoader(object):
kwargs['proot_show_paths'] = [functioning_dir(self.source)]
logger.debug("Running from `{}` working directory.".format(cwd))

if self.venv_path != settings.ANSIBLE_VENV_PATH:
kwargs['proot_custom_virtualenv'] = self.venv_path

return wrap_args_with_proot(cmd, cwd, **kwargs)

def command_to_json(self, cmd):
@@ -155,6 +168,8 @@ class AnsibleInventoryLoader(object):

proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
stdout, stderr = proc.communicate()
stdout = smart_text(stdout)
stderr = smart_text(stderr)

if self.tmp_private_dir:
shutil.rmtree(self.tmp_private_dir, True)
@@ -177,80 +192,7 @@ class AnsibleInventoryLoader(object):
base_args = self.get_base_args()
logger.info('Reading Ansible inventory source: %s', self.source)

data = self.command_to_json(base_args + ['--list'])

# TODO: remove after we run custom scripts through ansible-inventory
if self.is_custom and '_meta' not in data or 'hostvars' not in data['_meta']:
# Invoke the executable once for each host name we've built up
# to set their variables
data.setdefault('_meta', {})
data['_meta'].setdefault('hostvars', {})
logger.warning('Re-calling script for hostvars individually.')
for group_name, group_data in data.iteritems():
if group_name == '_meta':
continue

if isinstance(group_data, dict):
group_host_list = group_data.get('hosts', [])
elif isinstance(group_data, list):
group_host_list = group_data
else:
logger.warning('Group data for "%s" is not a dict or list',
group_name)
group_host_list = []

for hostname in group_host_list:
logger.debug('Obtaining hostvars for %s' % hostname.encode('utf-8'))
hostdata = self.command_to_json(
base_args + ['--host', hostname.encode("utf-8")]
)
if isinstance(hostdata, dict):
data['_meta']['hostvars'][hostname] = hostdata
else:
logger.warning(
'Expected dict of vars for host "%s" when '
'calling with `--host`, got %s instead',
k, str(type(data))
)

logger.info('Processing JSON output...')
inventory = MemInventory(
group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
inventory = dict_to_mem_data(data, inventory=inventory)

return inventory


def load_inventory_source(source, group_filter_re=None,
host_filter_re=None, exclude_empty_groups=False,
is_custom=False):
'''
Load inventory from given source directory or file.
'''
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
# good naming conventions
source = source.replace('rhv.py', 'ovirt4.py')
source = source.replace('satellite6.py', 'foreman.py')
source = source.replace('vmware.py', 'vmware_inventory.py')
if not os.path.exists(source):
raise IOError('Source does not exist: %s' % source)
source = os.path.join(os.getcwd(), os.path.dirname(source),
os.path.basename(source))
source = os.path.normpath(os.path.abspath(source))

inventory = AnsibleInventoryLoader(
source=source,
group_filter_re=group_filter_re,
host_filter_re=host_filter_re,
is_custom=is_custom).load()

logger.debug('Finished loading from source: %s', source)
# Exclude groups that are completely empty.
if exclude_empty_groups:
inventory.delete_empty_groups()
logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
len(inventory.all_group.all_hosts))
return inventory.all_group
return self.command_to_json(base_args + ['--list'])


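The loader changes above thread a per-job `venv_path` through `build_env()`. The general recipe for pointing a subprocess at a virtualenv is the one the diff uses: set `VIRTUAL_ENV`, prepend the venv's `bin` directory to `PATH`, and point `PYTHONPATH` at its `site-packages`. A generic sketch of that recipe; the default path `/var/lib/awx/venv/ansible` is an assumption about the AWX layout, and the function name is illustrative:

```python
import fnmatch
import os

def venv_environ(venv_path='/var/lib/awx/venv/ansible'):
    """Return a copy of os.environ adjusted to run a command inside `venv_path`."""
    env = dict(os.environ)
    env['VIRTUAL_ENV'] = venv_path
    env['PATH'] = os.path.join(venv_path, 'bin') + os.pathsep + env['PATH']
    env.pop('PYTHONPATH', None)  # default to none if no python version matches
    libdir = os.path.join(venv_path, 'lib')
    if os.path.isdir(libdir):
        for version in sorted(os.listdir(libdir)):
            # accept both python2.x and python3.x layouts, as the diff does
            if fnmatch.fnmatch(version, 'python[23].*'):
                env['PYTHONPATH'] = os.path.join(libdir, version, 'site-packages') + ':'
                break
    return env
```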
class Command(BaseCommand):
@@ -268,6 +210,8 @@ class Command(BaseCommand):
parser.add_argument('--inventory-id', dest='inventory_id', type=int,
default=None, metavar='i',
help='id of inventory to sync')
parser.add_argument('--venv', dest='venv', type=str, default=None,
help='absolute path to the AWX custom virtualenv to use')
parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False,
help='overwrite the destination hosts and groups')
parser.add_argument('--overwrite-vars', dest='overwrite_vars',
@@ -347,7 +291,7 @@ class Command(BaseCommand):
if enabled is not default:
enabled_value = getattr(self, 'enabled_value', None)
if enabled_value is not None:
enabled = bool(unicode(enabled_value) == unicode(enabled))
enabled = bool(str(enabled_value) == str(enabled))
else:
enabled = bool(enabled)
if enabled is default:
@@ -357,6 +301,19 @@ class Command(BaseCommand):
else:
raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))

def get_source_absolute_path(self, source):
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
# good naming conventions
source = source.replace('rhv.py', 'ovirt4.py')
source = source.replace('satellite6.py', 'foreman.py')
source = source.replace('vmware.py', 'vmware_inventory.py')
if not os.path.exists(source):
raise IOError('Source does not exist: %s' % source)
source = os.path.join(os.getcwd(), os.path.dirname(source),
os.path.basename(source))
source = os.path.normpath(os.path.abspath(source))
return source

def load_inventory_from_database(self):
'''
Load inventory and related objects from the database.
@@ -369,9 +326,9 @@ class Command(BaseCommand):
try:
self.inventory = Inventory.objects.get(**q)
except Inventory.DoesNotExist:
raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
except Inventory.MultipleObjectsReturned:
raise CommandError('Inventory with %s = %s returned multiple results' % q.items()[0])
raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
logger.info('Updating inventory %d: %s' % (self.inventory.pk,
self.inventory.name))

@@ -469,9 +426,9 @@ class Command(BaseCommand):
# Build list of all host pks, remove all that should not be deleted.
del_host_pks = set(hosts_qs.values_list('pk', flat=True))
if self.instance_id_var:
all_instance_ids = self.mem_instance_id_map.keys()
all_instance_ids = list(self.mem_instance_id_map.keys())
instance_ids = []
for offset in xrange(0, len(all_instance_ids), self._batch_size):
for offset in range(0, len(all_instance_ids), self._batch_size):
instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
del_host_pks.discard(host_pk)
@@ -479,14 +436,14 @@ class Command(BaseCommand):
del_host_pks.discard(host_pk)
all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
else:
all_host_names = self.all_group.all_hosts.keys()
for offset in xrange(0, len(all_host_names), self._batch_size):
all_host_names = list(self.all_group.all_hosts.keys())
for offset in range(0, len(all_host_names), self._batch_size):
host_names = all_host_names[offset:(offset + self._batch_size)]
for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
del_host_pks.discard(host_pk)
# Now delete all remaining hosts in batches.
all_del_pks = sorted(list(del_host_pks))
for offset in xrange(0, len(all_del_pks), self._batch_size):
for offset in range(0, len(all_del_pks), self._batch_size):
del_pks = all_del_pks[offset:(offset + self._batch_size)]
for host in hosts_qs.filter(pk__in=del_pks):
host_name = host.name
@@ -509,8 +466,8 @@ class Command(BaseCommand):
groups_qs = self.inventory_source.groups.all()
# Build list of all group pks, remove those that should not be deleted.
del_group_pks = set(groups_qs.values_list('pk', flat=True))
all_group_names = self.all_group.all_groups.keys()
for offset in xrange(0, len(all_group_names), self._batch_size):
all_group_names = list(self.all_group.all_groups.keys())
for offset in range(0, len(all_group_names), self._batch_size):
group_names = all_group_names[offset:(offset + self._batch_size)]
for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
del_group_pks.discard(group_pk)
@@ -522,7 +479,7 @@ class Command(BaseCommand):
del_group_pks.discard(self.inventory_source.deprecated_group_id)
# Now delete all remaining groups in batches.
all_del_pks = sorted(list(del_group_pks))
for offset in xrange(0, len(all_del_pks), self._batch_size):
for offset in range(0, len(all_del_pks), self._batch_size):
del_pks = all_del_pks[offset:(offset + self._batch_size)]
for group in groups_qs.filter(pk__in=del_pks):
group_name = group.name
@@ -561,7 +518,7 @@ class Command(BaseCommand):
for mem_group in mem_children:
db_children_name_pk_map.pop(mem_group.name, None)
del_child_group_pks = list(set(db_children_name_pk_map.values()))
for offset in xrange(0, len(del_child_group_pks), self._batch_size):
for offset in range(0, len(del_child_group_pks), self._batch_size):
child_group_pks = del_child_group_pks[offset:(offset + self._batch_size)]
for db_child in db_children.filter(pk__in=child_group_pks):
group_group_count += 1
@@ -574,12 +531,12 @@ class Command(BaseCommand):
del_host_pks = set(db_hosts.values_list('pk', flat=True))
mem_hosts = self.all_group.all_groups[db_group.name].hosts
all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
for offset in xrange(0, len(all_mem_host_names), self._batch_size):
for offset in range(0, len(all_mem_host_names), self._batch_size):
mem_host_names = all_mem_host_names[offset:(offset + self._batch_size)]
for db_host_pk in db_hosts.filter(name__in=mem_host_names).values_list('pk', flat=True):
del_host_pks.discard(db_host_pk)
all_mem_instance_ids = [h.instance_id for h in mem_hosts if h.instance_id]
for offset in xrange(0, len(all_mem_instance_ids), self._batch_size):
for offset in range(0, len(all_mem_instance_ids), self._batch_size):
mem_instance_ids = all_mem_instance_ids[offset:(offset + self._batch_size)]
for db_host_pk in db_hosts.filter(instance_id__in=mem_instance_ids).values_list('pk', flat=True):
del_host_pks.discard(db_host_pk)
@@ -587,7 +544,7 @@ class Command(BaseCommand):
for db_host_pk in all_db_host_pks:
del_host_pks.discard(db_host_pk)
del_host_pks = list(del_host_pks)
for offset in xrange(0, len(del_host_pks), self._batch_size):
for offset in range(0, len(del_host_pks), self._batch_size):
del_pks = del_host_pks[offset:(offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks):
group_host_count += 1
@@ -635,7 +592,7 @@ class Command(BaseCommand):
if len(v.parents) == 1 and v.parents[0].name == 'all':
root_group_names.add(k)
existing_group_names = set()
for offset in xrange(0, len(all_group_names), self._batch_size):
for offset in range(0, len(all_group_names), self._batch_size):
group_names = all_group_names[offset:(offset + self._batch_size)]
for group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[group.name]
@@ -739,7 +696,7 @@ class Command(BaseCommand):
mem_host_instance_id_map = {}
mem_host_name_map = {}
mem_host_names_to_update = set(self.all_group.all_hosts.keys())
for k,v in self.all_group.all_hosts.iteritems():
for k,v in self.all_group.all_hosts.items():
mem_host_name_map[k] = v
instance_id = self._get_instance_id(v.variables)
if instance_id in self.db_instance_id_map:
@@ -749,7 +706,7 @@ class Command(BaseCommand):

# Update all existing hosts where we know the PK based on instance_id.
all_host_pks = sorted(mem_host_pk_map.keys())
for offset in xrange(0, len(all_host_pks), self._batch_size):
for offset in range(0, len(all_host_pks), self._batch_size):
host_pks = all_host_pks[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter( pk__in=host_pks):
if db_host.pk in host_pks_updated:
@@ -761,7 +718,7 @@ class Command(BaseCommand):

# Update all existing hosts where we know the instance_id.
all_instance_ids = sorted(mem_host_instance_id_map.keys())
for offset in xrange(0, len(all_instance_ids), self._batch_size):
for offset in range(0, len(all_instance_ids), self._batch_size):
instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter( instance_id__in=instance_ids):
if db_host.pk in host_pks_updated:
@@ -773,7 +730,7 @@ class Command(BaseCommand):

# Update all existing hosts by name.
all_host_names = sorted(mem_host_name_map.keys())
for offset in xrange(0, len(all_host_names), self._batch_size):
for offset in range(0, len(all_host_names), self._batch_size):
host_names = all_host_names[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter( name__in=host_names):
if db_host.pk in host_pks_updated:
@@ -815,15 +772,15 @@ class Command(BaseCommand):
'''
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.children])
all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.children])
group_group_count = 0
for offset in xrange(0, len(all_group_names), self._batch_size):
for offset in range(0, len(all_group_names), self._batch_size):
group_names = all_group_names[offset:(offset + self._batch_size)]
for db_group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[db_group.name]
group_group_count += len(mem_group.children)
all_child_names = sorted([g.name for g in mem_group.children])
for offset2 in xrange(0, len(all_child_names), self._batch_size):
for offset2 in range(0, len(all_child_names), self._batch_size):
child_names = all_child_names[offset2:(offset2 + self._batch_size)]
db_children_qs = self.inventory.groups.filter(name__in=child_names)
for db_child in db_children_qs.filter(children__id=db_group.id):
@@ -842,15 +799,15 @@ class Command(BaseCommand):
# belongs.
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.hosts])
all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.hosts])
group_host_count = 0
for offset in xrange(0, len(all_group_names), self._batch_size):
for offset in range(0, len(all_group_names), self._batch_size):
group_names = all_group_names[offset:(offset + self._batch_size)]
for db_group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[db_group.name]
group_host_count += len(mem_group.hosts)
all_host_names = sorted([h.name for h in mem_group.hosts if not h.instance_id])
for offset2 in xrange(0, len(all_host_names), self._batch_size):
for offset2 in range(0, len(all_host_names), self._batch_size):
host_names = all_host_names[offset2:(offset2 + self._batch_size)]
db_hosts_qs = self.inventory.hosts.filter(name__in=host_names)
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
@@ -859,7 +816,7 @@ class Command(BaseCommand):
self._batch_add_m2m(db_group.hosts, db_host)
logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
all_instance_ids = sorted([h.instance_id for h in mem_group.hosts if h.instance_id])
for offset2 in xrange(0, len(all_instance_ids), self._batch_size):
for offset2 in range(0, len(all_instance_ids), self._batch_size):
instance_ids = all_instance_ids[offset2:(offset2 + self._batch_size)]
db_hosts_qs = self.inventory.hosts.filter(instance_id__in=instance_ids)
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
@@ -926,6 +883,7 @@ class Command(BaseCommand):
self.set_logging_level()
self.inventory_name = options.get('inventory_name', None)
self.inventory_id = options.get('inventory_id', None)
venv_path = options.get('venv', None)
self.overwrite = bool(options.get('overwrite', False))
self.overwrite_vars = bool(options.get('overwrite_vars', False))
self.keep_vars = bool(options.get('keep_vars', False))
@@ -986,12 +944,26 @@ class Command(BaseCommand):
self.inventory_update.status = 'running'
self.inventory_update.save()

# Load inventory from source.
self.all_group = load_inventory_source(self.source,
self.group_filter_re,
self.host_filter_re,
self.exclude_empty_groups,
self.is_custom)
source = self.get_source_absolute_path(self.source)

data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom, venv_path=venv_path).load()

logger.debug('Finished loading from source: %s', source)
logger.info('Processing JSON output...')
inventory = MemInventory(
group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
inventory = dict_to_mem_data(data, inventory=inventory)

del data # forget dict from import, could be large

logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
len(inventory.all_group.all_hosts))

if self.exclude_empty_groups:
inventory.delete_empty_groups()

self.all_group = inventory.all_group

if settings.DEBUG:
# depending on inventory source, this output can be
# *exceedingly* verbose - crawling a deeply nested
@@ -1074,4 +1046,4 @@ class Command(BaseCommand):
if exc and isinstance(exc, CommandError):
sys.exit(1)
elif exc:
raise
raise exc

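A recurring pattern in the import command is slicing a large list of names or pks into `self._batch_size` chunks so each `filter(...__in=...)` query stays bounded; the Python 3 port of those loops only swaps `xrange` for `range`. The idiom in isolation, with illustrative names:

```python
def batched(items, batch_size):
    """Yield successive slices of `items`, each at most `batch_size` long."""
    for offset in range(0, len(items), batch_size):
        yield items[offset:offset + batch_size]

# e.g. keep every IN (...) clause under 500 elements:
# for chunk in batched(sorted(all_host_names), 500):
#     queryset.filter(name__in=chunk)
```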
@@ -3,7 +3,6 @@

from awx.main.models import Instance, InstanceGroup
from django.core.management.base import BaseCommand
import six


class Ungrouped(object):
@@ -42,7 +41,7 @@ class Command(BaseCommand):
fmt += ' policy>={0.policy_instance_minimum}'
if instance_group.controller:
fmt += ' controller={0.controller.name}'
print(six.text_type(fmt + ']').format(instance_group))
print((fmt + ']').format(instance_group))
for x in instance_group.instances.all():
color = '\033[92m'
if x.capacity == 0 or x.enabled is False:
@@ -52,5 +51,5 @@ class Command(BaseCommand):
fmt += ' last_isolated_check="{0.last_isolated_check:%Y-%m-%d %H:%M:%S}"'
if x.capacity:
fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
print(six.text_type(fmt + '\033[0m').format(x, x.version or '?'))
print((fmt + '\033[0m').format(x, x.version or '?'))
print('')

@@ -1,7 +1,6 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
import sys
import six

from awx.main.utils.pglock import advisory_lock
from awx.main.models import Instance, InstanceGroup
@@ -19,11 +18,11 @@ class InstanceNotFound(Exception):
class Command(BaseCommand):

def add_arguments(self, parser):
parser.add_argument('--queuename', dest='queuename', type=lambda s: six.text_type(s, 'utf8'),
parser.add_argument('--queuename', dest='queuename', type=str,
help='Queue to create/update')
parser.add_argument('--hostnames', dest='hostnames', type=lambda s: six.text_type(s, 'utf8'),
parser.add_argument('--hostnames', dest='hostnames', type=str,
help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)')
parser.add_argument('--controller', dest='controller', type=lambda s: six.text_type(s, 'utf8'),
parser.add_argument('--controller', dest='controller', type=str,
default='', help='The controlling group (makes this an isolated group)')
parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
help='The percentage of active instances that will be assigned to this group'),
@@ -73,7 +72,7 @@ class Command(BaseCommand):
if instance.exists():
instances.append(instance[0])
else:
raise InstanceNotFound(six.text_type("Instance does not exist: {}").format(inst_name), changed)
raise InstanceNotFound("Instance does not exist: {}".format(inst_name), changed)

ig.instances.add(*instances)

@@ -99,24 +98,24 @@ class Command(BaseCommand):
if options.get('hostnames'):
hostname_list = options.get('hostnames').split(",")

with advisory_lock(six.text_type('instance_group_registration_{}').format(queuename)):
with advisory_lock('instance_group_registration_{}'.format(queuename)):
changed2 = False
changed3 = False
(ig, created, changed1) = self.get_create_update_instance_group(queuename, inst_per, inst_min)
if created:
print(six.text_type("Creating instance group {}".format(ig.name)))
print("Creating instance group {}".format(ig.name))
elif not created:
print(six.text_type("Instance Group already registered {}").format(ig.name))
print("Instance Group already registered {}".format(ig.name))

if ctrl:
(ig_ctrl, changed2) = self.update_instance_group_controller(ig, ctrl)
if changed2:
print(six.text_type("Set controller group {} on {}.").format(ctrl, queuename))
print("Set controller group {} on {}.".format(ctrl, queuename))

try:
(instances, changed3) = self.add_instances_to_group(ig, hostname_list)
for i in instances:
print(six.text_type("Added instance {} to {}").format(i.hostname, ig.name))
print("Added instance {} to {}".format(i.hostname, ig.name))
except InstanceNotFound as e:
instance_not_found_err = e

@@ -126,4 +125,3 @@ class Command(BaseCommand):
if instance_not_found_err:
print(instance_not_found_err.message)
sys.exit(1)


@@ -154,7 +154,7 @@ class ReplayJobEvents(JobStatusLifeCycle):
continue

if debug:
raw_input("{} of {}:".format(n, job_event_count))
input("{} of {}:".format(n, job_event_count))

if not je_previous:
stats['recording_start'] = je_current.created

@@ -19,7 +19,7 @@ logger = logging.getLogger('awx.main.dispatch')


def construct_bcast_queue_name(common_name):
return common_name.encode('utf8') + '_' + settings.CLUSTER_HOST_ID
return common_name + '_' + settings.CLUSTER_HOST_ID


class Command(BaseCommand):
@@ -69,21 +69,42 @@ class Command(BaseCommand):

return TaskResult()

sched_file = '/var/lib/awx/beat.db'
app = Celery()
app.conf.BROKER_URL = settings.BROKER_URL
app.conf.CELERY_TASK_RESULT_EXPIRES = False

# celery in py3 seems to have a bug where the celerybeat schedule
# shelve can become corrupted; we've _only_ seen this in Ubuntu and py36
# it can be avoided by detecting and removing the corrupted file
# at some point, we'll just stop using celerybeat, because it's clearly
# buggy, too -_-
#
# https://github.com/celery/celery/issues/4777
sched = AWXScheduler(schedule_filename=sched_file, app=app)
try:
sched.setup_schedule()
except Exception:
logger.exception('{} is corrupted, removing.'.format(sched_file))
sched._remove_db()
finally:
try:
sched.close()
except Exception:
logger.exception('{} failed to sync/close'.format(sched_file))

beat.Beat(
30,
app,
schedule='/var/lib/awx/beat.db', scheduler_cls=AWXScheduler
schedule=sched_file, scheduler_cls=AWXScheduler
).run()

def handle(self, *arg, **options):
if options.get('status'):
print Control('dispatcher').status()
print(Control('dispatcher').status())
return
if options.get('running'):
print Control('dispatcher').running()
print(Control('dispatcher').running())
return
if options.get('reload'):
return Control('dispatcher').control({'control': 'reload'})

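The dispatcher hunk above works around celery/celery#4777 by probing the celerybeat schedule shelve before starting Beat and deleting it if it cannot be opened. The diff does this through the scheduler's own `setup_schedule()`/`_remove_db()`; the same probe can be approximated with the standard library alone. A sketch, with only the `/var/lib/awx/beat.db` path taken from the diff:

```python
import logging
import os
import shelve

logger = logging.getLogger(__name__)

def ensure_schedule_readable(sched_file='/var/lib/awx/beat.db'):
    """Open-and-close the celerybeat shelve; remove it if it is corrupted."""
    try:
        db = shelve.open(sched_file)  # creates an empty shelve if absent
        db.close()
    except Exception:
        logger.exception('%s is corrupted, removing.', sched_file)
        # dbm backends may add suffixes to the filename, so try the common ones
        for path in (sched_file, sched_file + '.db', sched_file + '.dat'):
            if os.path.exists(path):
                os.remove(path)
```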
@@ -38,20 +38,20 @@ class HostManager(models.Manager):
hasattr(self.instance, 'host_filter') and
hasattr(self.instance, 'kind')):
if self.instance.kind == 'smart' and self.instance.host_filter is not None:
q = SmartFilter.query_from_string(self.instance.host_filter)
if self.instance.organization_id:
q = q.filter(inventory__organization=self.instance.organization_id)
# If we are using host_filters, disable the core_filters, this allows
# us to access all of the available Host entries, not just the ones associated
# with a specific FK/relation.
#
# If we don't disable this, a filter of {'inventory': self.instance} gets automatically
# injected by the related object mapper.
self.core_filters = {}
q = SmartFilter.query_from_string(self.instance.host_filter)
if self.instance.organization_id:
q = q.filter(inventory__organization=self.instance.organization_id)
# If we are using host_filters, disable the core_filters, this allows
# us to access all of the available Host entries, not just the ones associated
# with a specific FK/relation.
#
# If we don't disable this, a filter of {'inventory': self.instance} gets automatically
# injected by the related object mapper.
self.core_filters = {}

qs = qs & q
unique_by_name = qs.order_by('name', 'pk').distinct('name')
return qs.filter(pk__in=unique_by_name)
qs = qs & q
unique_by_name = qs.order_by('name', 'pk').distinct('name')
return qs.filter(pk__in=unique_by_name)
return qs



@@ -4,11 +4,11 @@
import uuid
import logging
import threading
import six
import time
import cProfile
import pstats
import os
import urllib.parse

from django.conf import settings
from django.contrib.auth.models import User
@@ -126,8 +126,9 @@ class SessionTimeoutMiddleware(object):
"""

def process_response(self, request, response):
should_skip = 'HTTP_X_WS_SESSION_QUIET' in request.META
req_session = getattr(request, 'session', None)
if req_session and not req_session.is_empty():
if req_session and not req_session.is_empty() and should_skip is False:
expiry = int(settings.SESSION_COOKIE_AGE)
request.session.set_expiry(expiry)
response['Session-Timeout'] = expiry
@@ -194,7 +195,7 @@ class URLModificationMiddleware(object):

def process_request(self, request):
if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
old_path = six.moves.urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
old_path = old_path[request.path.find(request.path_info):]
else:
old_path = request.path_info

@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
|
||||
name='ActivityStream',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
|
||||
('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])),
|
||||
('timestamp', models.DateTimeField(auto_now_add=True)),
|
||||
('changes', models.TextField(blank=True)),
|
||||
('object_relationship_type', models.TextField(blank=True)),
|
||||
@@ -42,8 +42,8 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
|
||||
('host_name', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])),
|
||||
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('changed', models.BooleanField(default=False, editable=False)),
|
||||
@@ -60,8 +60,8 @@ class Migration(migrations.Migration):
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('modified', models.DateTimeField(auto_now=True)),
|
||||
('expires', models.DateTimeField(default=django.utils.timezone.now)),
|
||||
('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
|
||||
('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
|
||||
('request_hash', models.CharField(default='', max_length=40, blank=True)),
|
||||
('reason', models.CharField(default='', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
|
||||
('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
),
|
||||
@@ -71,22 +71,22 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
-                ('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])),
+                ('kind', models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')])),
                 ('cloud', models.BooleanField(default=False, editable=False)),
-                ('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
-                ('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
-                ('password', models.CharField(default=b'', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
-                ('security_token', models.CharField(default=b'', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
-                ('project', models.CharField(default=b'', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
-                ('ssh_key_data', models.TextField(default=b'', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
-                ('ssh_key_unlock', models.CharField(default=b'', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
-                ('become_method', models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec')])),
-                ('become_username', models.CharField(default=b'', help_text='Privilege escalation username.', max_length=1024, blank=True)),
-                ('become_password', models.CharField(default=b'', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
-                ('vault_password', models.CharField(default=b'', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
+                ('host', models.CharField(default='', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
+                ('username', models.CharField(default='', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
+                ('password', models.CharField(default='', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
+                ('security_token', models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
+                ('project', models.CharField(default='', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
+                ('ssh_key_data', models.TextField(default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
+                ('ssh_key_unlock', models.CharField(default='', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
+                ('become_method', models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')])),
+                ('become_username', models.CharField(default='', help_text='Privilege escalation username.', max_length=1024, blank=True)),
+                ('become_password', models.CharField(default='', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
+                ('vault_password', models.CharField(default='', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
@@ -101,10 +101,10 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
-                ('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)),
+                ('script', models.TextField(default='', help_text='Inventory script contents', blank=True)),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
             ],
@@ -118,10 +118,10 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
-                ('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)),
+                ('variables', models.TextField(default='', help_text='Group variables in JSON or YAML format.', blank=True)),
                 ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
                 ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
                 ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
@@ -140,12 +140,12 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
                 ('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
-                ('instance_id', models.CharField(default=b'', max_length=100, blank=True)),
-                ('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)),
+                ('instance_id', models.CharField(default='', max_length=100, blank=True)),
+                ('variables', models.TextField(default='', help_text='Host variables in JSON or YAML format.', blank=True)),
                 ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
                 ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -171,10 +171,10 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(unique=True, max_length=512)),
-                ('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)),
+                ('variables', models.TextField(default='', help_text='Inventory variables in JSON or YAML format.', blank=True)),
                 ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
                 ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
                 ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
@@ -197,14 +197,14 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])),
+                ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete')])),
                 ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
                 ('failed', models.BooleanField(default=False, editable=False)),
                 ('changed', models.BooleanField(default=False, editable=False)),
-                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
-                ('play', models.CharField(default=b'', max_length=1024, editable=False)),
-                ('role', models.CharField(default=b'', max_length=1024, editable=False)),
-                ('task', models.CharField(default=b'', max_length=1024, editable=False)),
+                ('host_name', models.CharField(default='', max_length=1024, editable=False)),
+                ('play', models.CharField(default='', max_length=1024, editable=False)),
+                ('role', models.CharField(default='', max_length=1024, editable=False)),
+                ('task', models.CharField(default='', max_length=1024, editable=False)),
                 ('counter', models.PositiveIntegerField(default=0)),
                 ('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
                 ('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
@@ -220,7 +220,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
+                ('host_name', models.CharField(default='', max_length=1024, editable=False)),
                 ('changed', models.PositiveIntegerField(default=0, editable=False)),
                 ('dark', models.PositiveIntegerField(default=0, editable=False)),
                 ('failures', models.PositiveIntegerField(default=0, editable=False)),
@@ -250,7 +250,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(unique=True, max_length=512)),
                 ('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
@@ -269,10 +269,10 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
-                ('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])),
+                ('permission_type', models.CharField(max_length=64, choices=[('read', 'Read Inventory'), ('write', 'Edit Inventory'), ('admin', 'Administrate Inventory'), ('run', 'Deploy To Inventory'), ('check', 'Deploy To Inventory (Dry Run)'), ('scan', 'Scan an Inventory'), ('create', 'Create a Job Template')])),
                 ('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
@@ -286,7 +286,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('ldap_dn', models.CharField(default=b'', max_length=1024)),
+                ('ldap_dn', models.CharField(default='', max_length=1024)),
                 ('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, to=settings.AUTH_USER_MODEL)),
             ],
         ),
@@ -296,7 +296,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(unique=True, max_length=512)),
                 ('enabled', models.BooleanField(default=True)),
@@ -319,7 +319,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -338,26 +338,26 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
                 ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
-                ('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])),
+                ('launch_type', models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency')])),
                 ('cancel_flag', models.BooleanField(default=False, editable=False)),
-                ('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])),
+                ('status', models.CharField(default='new', max_length=20, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')])),
                 ('failed', models.BooleanField(default=False, editable=False)),
                 ('started', models.DateTimeField(default=None, null=True, editable=False)),
                 ('finished', models.DateTimeField(default=None, null=True, editable=False)),
                 ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)),
-                ('job_args', models.TextField(default=b'', editable=False, blank=True)),
-                ('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
+                ('job_args', models.TextField(default='', editable=False, blank=True)),
+                ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)),
                 ('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)),
-                ('job_explanation', models.TextField(default=b'', editable=False, blank=True)),
-                ('start_args', models.TextField(default=b'', editable=False, blank=True)),
-                ('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)),
-                ('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)),
-                ('result_traceback', models.TextField(default=b'', editable=False, blank=True)),
-                ('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)),
+                ('job_explanation', models.TextField(default='', editable=False, blank=True)),
+                ('start_args', models.TextField(default='', editable=False, blank=True)),
+                ('result_stdout_text', models.TextField(default='', editable=False, blank=True)),
+                ('result_stdout_file', models.TextField(default='', editable=False, blank=True)),
+                ('result_traceback', models.TextField(default='', editable=False, blank=True)),
+                ('celery_task_id', models.CharField(default='', max_length=100, editable=False, blank=True)),
             ],
         ),
         migrations.CreateModel(
@@ -366,7 +366,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('active', models.BooleanField(default=True, editable=False)),
                 ('name', models.CharField(max_length=512)),
                 ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
@@ -374,19 +374,19 @@ class Migration(migrations.Migration):
                 ('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
                 ('has_schedules', models.BooleanField(default=False, editable=False)),
                 ('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
-                ('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])),
+                ('status', models.CharField(default='ok', max_length=32, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled'), ('never updated', 'Never Updated'), ('ok', 'OK'), ('missing', 'Missing'), ('none', 'No External Source'), ('updating', 'Updating')])),
             ],
         ),
         migrations.CreateModel(
             name='AdHocCommand',
             fields=[
                 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-                ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')])),
-                ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('module_name', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('module_args', models.TextField(default=b'', blank=True)),
+                ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check')])),
+                ('limit', models.CharField(default='', max_length=1024, blank=True)),
+                ('module_name', models.CharField(default='', max_length=1024, blank=True)),
+                ('module_args', models.TextField(default='', blank=True)),
                 ('forks', models.PositiveIntegerField(default=0, blank=True)),
-                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
+                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
                 ('become_enabled', models.BooleanField(default=False)),
             ],
             bases=('main.unifiedjob',),
@@ -395,12 +395,12 @@ class Migration(migrations.Migration):
             name='InventorySource',
             fields=[
                 ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
-                ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
-                ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
-                ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
-                ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
-                ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+                ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
+                ('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
+                ('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
+                ('source_regions', models.CharField(default='', max_length=1024, blank=True)),
+                ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
+                ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
                 ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
                 ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
                 ('update_on_launch', models.BooleanField(default=False)),
@@ -412,12 +412,12 @@ class Migration(migrations.Migration):
             name='InventoryUpdate',
             fields=[
                 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-                ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
-                ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
-                ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
-                ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
-                ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+                ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
+                ('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
+                ('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
+                ('source_regions', models.CharField(default='', max_length=1024, blank=True)),
+                ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
+                ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
                 ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
                 ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
                 ('license_error', models.BooleanField(default=False, editable=False)),
@@ -428,16 +428,16 @@ class Migration(migrations.Migration):
             name='Job',
             fields=[
                 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-                ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
-                ('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
+                ('playbook', models.CharField(default='', max_length=1024, blank=True)),
                 ('forks', models.PositiveIntegerField(default=0, blank=True)),
-                ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
-                ('extra_vars', models.TextField(default=b'', blank=True)),
-                ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('limit', models.CharField(default='', max_length=1024, blank=True)),
+                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
+                ('extra_vars', models.TextField(default='', blank=True)),
+                ('job_tags', models.CharField(default='', max_length=1024, blank=True)),
                 ('force_handlers', models.BooleanField(default=False)),
-                ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
+                ('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
                 ('become_enabled', models.BooleanField(default=False)),
             ],
             options={
@@ -449,18 +449,18 @@ class Migration(migrations.Migration):
             name='JobTemplate',
             fields=[
                 ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
-                ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
-                ('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
+                ('playbook', models.CharField(default='', max_length=1024, blank=True)),
                 ('forks', models.PositiveIntegerField(default=0, blank=True)),
-                ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
-                ('extra_vars', models.TextField(default=b'', blank=True)),
-                ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('limit', models.CharField(default='', max_length=1024, blank=True)),
+                ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
+                ('extra_vars', models.TextField(default='', blank=True)),
+                ('job_tags', models.CharField(default='', max_length=1024, blank=True)),
                 ('force_handlers', models.BooleanField(default=False)),
-                ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
-                ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
+                ('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
                 ('become_enabled', models.BooleanField(default=False)),
-                ('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)),
+                ('host_config_key', models.CharField(default='', max_length=1024, blank=True)),
                 ('ask_variables_on_launch', models.BooleanField(default=False)),
                 ('survey_enabled', models.BooleanField(default=False)),
                 ('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)),
@@ -475,9 +475,9 @@ class Migration(migrations.Migration):
             fields=[
                 ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
                 ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
-                ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
-                ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
-                ('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+                ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
+                ('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
+                ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
                 ('scm_clean', models.BooleanField(default=False)),
                 ('scm_delete_on_update', models.BooleanField(default=False)),
                 ('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
@@ -494,9 +494,9 @@ class Migration(migrations.Migration):
             fields=[
                 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
                 ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
-                ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
-                ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
-                ('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+                ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
+                ('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
+                ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
                 ('scm_clean', models.BooleanField(default=False)),
                 ('scm_delete_on_update', models.BooleanField(default=False)),
             ],
@@ -506,8 +506,8 @@ class Migration(migrations.Migration):
             name='SystemJob',
             fields=[
                 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-                ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
-                ('extra_vars', models.TextField(default=b'', blank=True)),
+                ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+                ('extra_vars', models.TextField(default='', blank=True)),
             ],
             options={
                 'ordering': ('id',),
@@ -518,7 +518,7 @@ class Migration(migrations.Migration):
             name='SystemJobTemplate',
             fields=[
                 ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
-                ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+                ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
             ],
             bases=('main.unifiedjobtemplate', models.Model),
         ),

@@ -105,24 +105,24 @@ def create_system_job_templates(apps, schema_editor):


 class Migration(migrations.Migration):
-    replaces = [(b'main', '0002_v300_tower_settings_changes'),
-                (b'main', '0003_v300_notification_changes'),
-                (b'main', '0004_v300_fact_changes'),
-                (b'main', '0005_v300_migrate_facts'),
-                (b'main', '0006_v300_active_flag_cleanup'),
-                (b'main', '0007_v300_active_flag_removal'),
-                (b'main', '0008_v300_rbac_changes'),
-                (b'main', '0009_v300_rbac_migrations'),
-                (b'main', '0010_v300_create_system_job_templates'),
-                (b'main', '0011_v300_credential_domain_field'),
-                (b'main', '0012_v300_create_labels'),
-                (b'main', '0013_v300_label_changes'),
-                (b'main', '0014_v300_invsource_cred'),
-                (b'main', '0015_v300_label_changes'),
-                (b'main', '0016_v300_prompting_changes'),
-                (b'main', '0017_v300_prompting_migrations'),
-                (b'main', '0018_v300_host_ordering'),
-                (b'main', '0019_v300_new_azure_credential'),]
+    replaces = [('main', '0002_v300_tower_settings_changes'),
+                ('main', '0003_v300_notification_changes'),
+                ('main', '0004_v300_fact_changes'),
+                ('main', '0005_v300_migrate_facts'),
+                ('main', '0006_v300_active_flag_cleanup'),
+                ('main', '0007_v300_active_flag_removal'),
+                ('main', '0008_v300_rbac_changes'),
+                ('main', '0009_v300_rbac_migrations'),
+                ('main', '0010_v300_create_system_job_templates'),
+                ('main', '0011_v300_credential_domain_field'),
+                ('main', '0012_v300_create_labels'),
+                ('main', '0013_v300_label_changes'),
+                ('main', '0014_v300_invsource_cred'),
+                ('main', '0015_v300_label_changes'),
+                ('main', '0016_v300_prompting_changes'),
+                ('main', '0017_v300_prompting_migrations'),
+                ('main', '0018_v300_host_ordering'),
+                ('main', '0019_v300_new_azure_credential'),]

     dependencies = [
         ('taggit', '0002_auto_20150616_2121'),
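The replaces list is Django's marker for a squashed migration: databases that have already applied every migration named in the list treat the squash as applied, while fresh databases run only the squash. A minimal sketch of the shape, with hypothetical app and migration names:

    from django.db import migrations

    class Migration(migrations.Migration):
        # Databases that already ran 0002..0004 skip this squash;
        # new databases run it instead of the three originals.
        replaces = [
            ('myapp', '0002_first_change'),
            ('myapp', '0003_second_change'),
            ('myapp', '0004_third_change'),
        ]
        dependencies = [('myapp', '0001_initial')]
        operations = []
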
@@ -143,7 +143,7 @@ class Migration(migrations.Migration):
                 ('description', models.TextField()),
                 ('category', models.CharField(max_length=128)),
                 ('value', models.TextField(blank=True)),
-                ('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])),
+                ('value_type', models.CharField(max_length=12, choices=[('string', 'String'), ('int', 'Integer'), ('float', 'Decimal'), ('json', 'JSON'), ('bool', 'Boolean'), ('password', 'Password'), ('list', 'List')])),
                 ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
             ],
         ),
@@ -154,12 +154,12 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
-                ('error', models.TextField(default=b'', editable=False, blank=True)),
+                ('status', models.CharField(default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')])),
+                ('error', models.TextField(default='', editable=False, blank=True)),
                 ('notifications_sent', models.IntegerField(default=0, editable=False)),
-                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])),
-                ('recipients', models.TextField(default=b'', editable=False, blank=True)),
-                ('subject', models.TextField(default=b'', editable=False, blank=True)),
+                ('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
+                ('recipients', models.TextField(default='', editable=False, blank=True)),
+                ('subject', models.TextField(default='', editable=False, blank=True)),
                 ('body', jsonfield.fields.JSONField(default=dict, blank=True)),
             ],
             options={
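Note the contrast inside this hunk: 'body' uses default=dict, while the older JSONField lines elsewhere in the listing use default={}. Passing the dict class gives every row a fresh dictionary; a literal {} is one shared object that every instance would mutate in place. A minimal illustration of the pitfall in plain Python:

    shared = {}
    def make_row_bad(default=shared):          # one dict shared by all callers
        return default
    a, b = make_row_bad(), make_row_bad()
    a['k'] = 1
    assert b == {'k': 1}                       # b sees a's mutation

    def make_row_good(default_factory=dict):   # fresh dict per call
        return default_factory()
    c, d = make_row_good(), make_row_good()
    c['k'] = 1
    assert d == {}
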
@@ -172,9 +172,9 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('name', models.CharField(unique=True, max_length=512)),
-                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])),
+                ('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
                 ('notification_configuration', jsonfield.fields.JSONField(default=dict)),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('modified_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -381,7 +381,7 @@ class Migration(migrations.Migration):
                 ('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)),
                 ('members', models.ManyToManyField(related_name='roles', to=settings.AUTH_USER_MODEL)),
                 ('parents', models.ManyToManyField(related_name='children', to='main.Role')),
-                ('implicit_parents', models.TextField(default=b'[]')),
+                ('implicit_parents', models.TextField(default='[]')),
                 ('content_type', models.ForeignKey(default=None, to='contenttypes.ContentType', null=True)),
                 ('object_id', models.PositiveIntegerField(default=None, null=True)),

@@ -422,122 +422,122 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='credential',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator'], to='main.Role', null='True'),
         ),
         migrations.AddField(
            model_name='credential',
             name='use_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='credential',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='custominventoryscript',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='custominventoryscript',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'organization.member_role', b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'], to='main.Role', null='True'),
        ),
         migrations.AddField(
             model_name='inventory',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='inventory',
             name='adhoc_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='inventory',
             name='update_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='inventory',
             name='use_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'adhoc_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='adhoc_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='inventory',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'update_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'update_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='jobtemplate',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.admin_role', b'inventory.organization.admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.admin_role', 'inventory.organization.admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='jobtemplate',
             name='execute_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='jobtemplate',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.auditor_role', b'inventory.organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='organization',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='organization',
             name='auditor_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_auditor', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_auditor', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='organization',
             name='member_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='organization',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'member_role', b'auditor_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['member_role', 'auditor_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='project',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'singleton:system_administrator'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'singleton:system_administrator'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='project',
             name='use_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='project',
             name='update_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='project',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'singleton:system_auditor', b'use_role', b'update_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'singleton:system_auditor', 'use_role', 'update_role'], to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='team',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='team',
             name='member_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null='True'),
         ),
         migrations.AddField(
             model_name='team',
             name='read_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role', b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
+            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role', 'organization.auditor_role', 'member_role'], to='main.Role', null='True'),
         ),

# System Job Templates
@@ -545,18 +545,18 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='systemjob',
name='job_type',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
),
migrations.AlterField(
model_name='systemjobtemplate',
name='job_type',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
),
# Credential domain field
migrations.AddField(
model_name='credential',
name='domain',
-field=models.CharField(default=b'', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True),
+field=models.CharField(default='', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True),
),
# Create Labels
migrations.CreateModel(
@@ -565,7 +565,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
-('description', models.TextField(default=b'', blank=True)),
+('description', models.TextField(default='', blank=True)),
('name', models.CharField(max_length=512)),
('created_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('modified_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -625,7 +625,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='credential',
name='authorize_password',
-field=models.CharField(default=b'', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='credential',
@@ -640,17 +640,17 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='team',
@@ -702,41 +702,41 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='credential',
name='client',
-field=models.CharField(default=b'', help_text='Client Id or Application Id for the credential', max_length=128, blank=True),
+field=models.CharField(default='', help_text='Client Id or Application Id for the credential', max_length=128, blank=True),
),
migrations.AddField(
model_name='credential',
name='secret',
-field=models.CharField(default=b'', help_text='Secret Token for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Secret Token for this credential', max_length=1024, blank=True),
),
migrations.AddField(
model_name='credential',
name='subscription',
-field=models.CharField(default=b'', help_text='Subscription identifier for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Subscription identifier for this credential', max_length=1024, blank=True),
),
migrations.AddField(
model_name='credential',
name='tenant',
-field=models.CharField(default=b'', help_text='Tenant identifier for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Tenant identifier for this credential', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='credential',
name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='host',
name='instance_id',
-field=models.CharField(default=b'', max_length=1024, blank=True),
+field=models.CharField(default='', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
]

@@ -9,20 +9,20 @@ from django.db import migrations, models
from django.conf import settings
import awx.main.fields

-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30


class Migration(migrations.Migration):
-replaces = [(b'main', '0020_v300_labels_changes'),
-(b'main', '0021_v300_activity_stream'),
-(b'main', '0022_v300_adhoc_extravars'),
-(b'main', '0023_v300_activity_stream_ordering'),
-(b'main', '0024_v300_jobtemplate_allow_simul'),
-(b'main', '0025_v300_update_rbac_parents'),
-(b'main', '0026_v300_credential_unique'),
-(b'main', '0027_v300_team_migrations'),
-(b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
+replaces = [('main', '0020_v300_labels_changes'),
+('main', '0021_v300_activity_stream'),
+('main', '0022_v300_adhoc_extravars'),
+('main', '0023_v300_activity_stream_ordering'),
+('main', '0024_v300_jobtemplate_allow_simul'),
+('main', '0025_v300_update_rbac_parents'),
+('main', '0026_v300_credential_unique'),
+('main', '0027_v300_team_migrations'),
+('main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)

dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -63,22 +63,22 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='adhoccommand',
name='extra_vars',
-field=models.TextField(default=b'', blank=True),
+field=models.TextField(default='', blank=True),
),
migrations.AlterField(
model_name='credential',
name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
# jobtemplate allow simul
migrations.AddField(
@@ -90,17 +90,17 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='use_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='team',
name='member_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
),
migrations.AlterField(
model_name='team',
name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'member_role'], to='main.Role', null='True'),
),
# Unique credential
migrations.AlterUniqueTogether(
@@ -110,7 +110,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
),
# Team cascade
migrations.AlterField(

@@ -8,8 +8,8 @@ import django.db.models.deletion
import awx.main.models.workflow
import awx.main.fields

-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30


class Migration(migrations.Migration):
@@ -19,7 +19,7 @@ class Migration(migrations.Migration):
]

replaces = _squashed.replaces(SQUASHED_30) + [
-(b'main', '0034_v310_release'),
+('main', '0034_v310_release'),
]

operations = _squashed.operations(SQUASHED_30) + [
@@ -42,13 +42,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobevent',
name='uuid',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
),
# Job Parent Event UUID
migrations.AddField(
model_name='jobevent',
name='parent_uuid',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
),
# Modify the HA Instance
migrations.RemoveField(
@@ -63,19 +63,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='become_method',
-field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun')]),
+field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun')]),
),
# Add Workflows
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
-field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync')]),
+field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync')]),
),
migrations.CreateModel(
name='WorkflowJob',
fields=[
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-('extra_vars', models.TextField(default=b'', blank=True)),
+('extra_vars', models.TextField(default='', blank=True)),
],
options={
'ordering': ('id',),
@@ -101,8 +101,8 @@ class Migration(migrations.Migration):
name='WorkflowJobTemplate',
fields=[
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
-('extra_vars', models.TextField(default=b'', blank=True)),
-('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')),
+('extra_vars', models.TextField(default='', blank=True)),
+('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True')),
],
bases=('main.unifiedjobtemplate', models.Model),
),
@@ -176,7 +176,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='execute_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
),
migrations.AddField(
model_name='workflowjobtemplate',
@@ -186,7 +186,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
@@ -216,7 +216,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
@@ -269,23 +269,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='execution_node',
-field=models.TextField(default=b'', editable=False, blank=True),
+field=models.TextField(default='', editable=False, blank=True),
),
# SCM Revision
migrations.AddField(
model_name='project',
name='scm_revision',
-field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
+field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
),
migrations.AddField(
model_name='projectupdate',
name='job_type',
-field=models.CharField(default=b'check', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')]),
+field=models.CharField(default='check', max_length=64, choices=[('run', 'Run'), ('check', 'Check')]),
),
migrations.AddField(
model_name='job',
name='scm_revision',
-field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
+field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
),
# Project Playbook Files
migrations.AddField(
@@ -307,12 +307,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='adhoccommandevent',
name='stdout',
-field=models.TextField(default=b'', editable=False),
+field=models.TextField(default='', editable=False),
),
migrations.AddField(
model_name='adhoccommandevent',
name='uuid',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
),
migrations.AddField(
model_name='adhoccommandevent',
@@ -327,7 +327,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobevent',
name='playbook',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
),
migrations.AddField(
model_name='jobevent',
@@ -337,7 +337,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobevent',
name='stdout',
-field=models.TextField(default=b'', editable=False),
+field=models.TextField(default='', editable=False),
),
migrations.AddField(
model_name='jobevent',
@@ -352,7 +352,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='adhoccommandevent',
name='event',
-field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]),
+field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
),
migrations.AlterField(
model_name='jobevent',
@@ -362,7 +362,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='jobevent',
name='event',
-field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]),
+field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
),
migrations.AlterUniqueTogether(
name='adhoccommandevent',
@@ -505,7 +505,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='host',
name='instance_id',
-field=models.CharField(default=b'', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='project',
@@ -520,7 +520,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_type',
-field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
migrations.AlterField(
model_name='project',
@@ -535,7 +535,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_url',
-field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
+field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
),
migrations.AlterField(
model_name='project',
@@ -555,12 +555,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
-field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_url',
-field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
+field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
),
migrations.AlterField(
model_name='projectupdate',
@@ -600,7 +600,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='execution_node',
-field=models.TextField(default=b'', help_text='The Tower node the job executed on.', editable=False, blank=True),
+field=models.TextField(default='', help_text='The Tower node the job executed on.', editable=False, blank=True),
),
migrations.AlterField(
model_name='unifiedjob',
@@ -610,7 +610,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='job_explanation',
-field=models.TextField(default=b'', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True),
+field=models.TextField(default='', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True),
),
migrations.AlterField(
model_name='unifiedjob',

@@ -2,8 +2,8 @@
from __future__ import unicode_literals
from django.db import migrations

-import _squashed
-from _squashed_31 import SQUASHED_31
+from . import _squashed
+from ._squashed_31 import SQUASHED_31


class Migration(migrations.Migration):

@@ -72,7 +72,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventory',
name='kind',
-field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the host_filter property.')]),
+field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
),
migrations.CreateModel(
name='SmartInventoryMembership',
@@ -143,7 +143,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='scm_last_revision',
-field=models.CharField(default=b'', max_length=1024, editable=False, blank=True),
+field=models.CharField(default='', max_length=1024, editable=False, blank=True),
),
migrations.AddField(
model_name='inventorysource',
@@ -163,27 +163,27 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source_path',
-field=models.CharField(default=b'', max_length=1024, blank=True),
+field=models.CharField(default='', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source_path',
-field=models.CharField(default=b'', max_length=1024, blank=True),
+field=models.CharField(default='', max_length=1024, blank=True),
),
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
-field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync'), (b'scm', 'SCM Update')]),
+field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')]),
),
migrations.AddField(
model_name='inventorysource',
@@ -211,12 +211,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='verbosity',
-field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]),
+field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]),
),
migrations.AddField(
model_name='inventoryupdate',
name='verbosity',
-field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]),
+field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]),
),

# Job Templates
@@ -317,7 +317,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventory',
name='kind',
-field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the host_filter property.')]),
+field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
),

# Timeout help text update
@@ -378,9 +378,9 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
-('description', models.TextField(default=b'', blank=True)),
+('description', models.TextField(default='', blank=True)),
('name', models.CharField(max_length=512)),
-('kind', models.CharField(max_length=32, choices=[(b'ssh', 'Machine'), (b'vault', 'Vault'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'cloud', 'Cloud'), (b'insights', 'Insights')])),
+('kind', models.CharField(max_length=32, choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('insights', 'Insights')])),
('managed_by_tower', models.BooleanField(default=False, editable=False)),
('inputs', awx.main.fields.CredentialTypeInputField(default={}, blank=True, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
('injectors', awx.main.fields.CredentialTypeInjectorField(default={}, blank=True, help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
@@ -435,7 +435,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='become_method',
-field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun'), (b'runas', 'Runas')]),
+field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun'), ('runas', 'Runas')]),
),

# Connecting activity stream
@@ -496,6 +496,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='execution_node',
-field=models.TextField(default=b'', help_text='The node the job executed on.', editable=False, blank=True),
+field=models.TextField(default='', help_text='The node the job executed on.', editable=False, blank=True),
),
]

@@ -20,11 +20,11 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
),
]

@@ -21,9 +21,9 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('event_data', awx.main.fields.JSONField(blank=True, default={})),
-('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
+('uuid', models.CharField(default='', editable=False, max_length=1024)),
('counter', models.PositiveIntegerField(default=0, editable=False)),
-('stdout', models.TextField(default=b'', editable=False)),
+('stdout', models.TextField(default='', editable=False)),
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),
@@ -39,17 +39,17 @@ class Migration(migrations.Migration):
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
-('event', models.CharField(choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')], max_length=100)),
+('event', models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100)),
('event_data', awx.main.fields.JSONField(blank=True, default={})),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
-('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
-('playbook', models.CharField(default=b'', editable=False, max_length=1024)),
-('play', models.CharField(default=b'', editable=False, max_length=1024)),
-('role', models.CharField(default=b'', editable=False, max_length=1024)),
-('task', models.CharField(default=b'', editable=False, max_length=1024)),
+('uuid', models.CharField(default='', editable=False, max_length=1024)),
+('playbook', models.CharField(default='', editable=False, max_length=1024)),
+('play', models.CharField(default='', editable=False, max_length=1024)),
+('role', models.CharField(default='', editable=False, max_length=1024)),
+('task', models.CharField(default='', editable=False, max_length=1024)),
('counter', models.PositiveIntegerField(default=0, editable=False)),
-('stdout', models.TextField(default=b'', editable=False)),
+('stdout', models.TextField(default='', editable=False)),
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),
@@ -66,9 +66,9 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('event_data', awx.main.fields.JSONField(blank=True, default={})),
-('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
+('uuid', models.CharField(default='', editable=False, max_length=1024)),
('counter', models.PositiveIntegerField(default=0, editable=False)),
-('stdout', models.TextField(default=b'', editable=False)),
+('stdout', models.TextField(default='', editable=False)),
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),

@@ -18,77 +18,77 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='organization',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='job_template_admin_role',
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='credential_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='inventory_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='project_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='workflow_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='notification_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='credential',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='inventory',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='project',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.project_admin_role', 'singleton:system_administrator'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='jobtemplate',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.job_template_admin_role', b'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='jobtemplate',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='organization',
name='member_role',
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
),

]

@@ -35,8 +35,8 @@ class Migration(migrations.Migration):
('skip_authorization', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('description', models.TextField(blank=True, default=b'')),
('logo_data', models.TextField(default=b'', editable=False, validators=[django.core.validators.RegexValidator(re.compile(b'.*'))])),
('description', models.TextField(blank=True, default='')),
('logo_data', models.TextField(default='', editable=False, validators=[django.core.validators.RegexValidator(re.compile('.*'))])),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL)),
],
options={
@@ -52,7 +52,7 @@ class Migration(migrations.Migration):
('scope', models.TextField(blank=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('description', models.CharField(blank=True, default=b'', max_length=200)),
('description', models.CharField(blank=True, default='', max_length=200)),
('last_used', models.DateTimeField(default=None, editable=False, null=True)),
('application', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL)),

@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='scope',
field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions."),
field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions."),
),
migrations.AlterField(
model_name='oauth2accesstoken',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based'), (b'client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
),
migrations.AlterField(
model_name='oauth2application',
@@ -40,7 +40,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='client_type',
field=models.CharField(choices=[(b'confidential', 'Confidential'), (b'public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
field=models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
),
migrations.AlterField(
model_name='oauth2application',

@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='scope',
field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
),
]

@@ -15,6 +15,6 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='controller_node',
field=models.TextField(blank=True, default=b'', editable=False, help_text='The instance that managed the isolated execution environment.'),
field=models.TextField(blank=True, default='', editable=False, help_text='The instance that managed the isolated execution environment.'),
),
]

@@ -18,12 +18,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='member_role',
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='organization',
name='read_role',
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'member_role', b'auditor_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
),
migrations.RunPython(rebuild_role_hierarchy),
]

@@ -15,6 +15,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
),
]

@@ -17,131 +17,131 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='credential',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='credentialtype',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='credentialtype',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='custominventoryscript',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', u'model_name': 'custominventoryscript'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='custominventoryscript',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', u'model_name': 'custominventoryscript'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='group',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='group',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='host',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='host',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='inventory',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='inventory',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='label',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='label',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='notificationtemplate',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='notificationtemplate',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='organization',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='organization',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='schedule',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='schedule',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='team',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='team',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='unifiedjob',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='unifiedjob',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='unifiedjobtemplate',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='unifiedjobtemplate',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
]

awx/main/migrations/0055_v340_add_grafana_notification.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-20 12:00
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('main', '0054_v340_workflow_convergence'),
]

operations = [
migrations.AlterField(
model_name='notification',
name='notification_type',
field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
),
]
awx/main/migrations/0056_v350_custom_venv_history.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-22 22:20
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('main', '0055_v340_add_grafana_notification'),
]

operations = [
migrations.AddField(
model_name='inventoryupdate',
name='custom_virtualenv',
field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
),
migrations.AddField(
model_name='job',
name='custom_virtualenv',
field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
),
]
awx/main/migrations/0057_v350_remove_become_method_type.py (new file, 19 lines)
@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-29 19:56
from __future__ import unicode_literals

from django.db import migrations

# AWX
from awx.main.migrations import _credentialtypes as credentialtypes


class Migration(migrations.Migration):

dependencies = [
('main', '0056_v350_custom_venv_history'),
]

operations = [
migrations.RunPython(credentialtypes.remove_become_methods),
]
awx/main/migrations/0058_v350_remove_limit_limit.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-05 18:29
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('main', '0057_v350_remove_become_method_type'),
]

operations = [
migrations.AlterField(
model_name='job',
name='limit',
field=models.TextField(blank=True, default=''),
),
migrations.AlterField(
model_name='jobtemplate',
name='limit',
field=models.TextField(blank=True, default=''),
),
]
@@ -1,7 +1,6 @@
import logging

from django.db.models import Q
import six

logger = logging.getLogger('awx.main.migrations')

@@ -39,8 +38,8 @@ def rename_inventory_sources(apps, schema_editor):
Q(deprecated_group__inventory__organization=org)).distinct().all()):

inventory = invsrc.deprecated_group.inventory if invsrc.deprecated_group else invsrc.inventory
name = six.text_type('{0} - {1} - {2}').format(invsrc.name, inventory.name, i)
logger.debug(six.text_type("Renaming InventorySource({0}) {1} -> {2}").format(
name = '{0} - {1} - {2}'.format(invsrc.name, inventory.name, i)
logger.debug("Renaming InventorySource({0}) {1} -> {2}".format(
invsrc.pk, invsrc.name, name
))
invsrc.name = name

@@ -1,7 +1,6 @@
import logging
import json
from django.utils.translation import ugettext_lazy as _
import six

from awx.conf.migrations._reencrypt import (
decrypt_field,

@@ -3,8 +3,6 @@ import logging
from django.utils.timezone import now
from django.utils.text import slugify

import six

from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
from awx.main import utils

@@ -26,7 +24,7 @@ def _create_fact_scan_project(ContentType, Project, org):
polymorphic_ctype=ct)
proj.save()

slug_name = slugify(six.text_type(name)).replace(u'-', u'_')
slug_name = slugify(str(name)).replace(u'-', u'_')
proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)

proj.save()

@@ -45,8 +45,8 @@ def replaces(squashed, applied=False):
'''
squashed_keys, key_index = squash_data(squashed)
if applied:
return [(b'main', key) for key in squashed_keys[:key_index]]
return [(b'main', key) for key in squashed_keys[key_index:]]
return [('main', key) for key in squashed_keys[:key_index]]
return [('main', key) for key in squashed_keys[key_index:]]
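
For readers following the hunk above: replaces() turns the squashed-migration bookkeeping into the (app_label, migration_name) tuples Django expects. A minimal sketch of the slicing behavior, assuming (as the surrounding code suggests, but not verified against the full module) that squash_data() returns the ordered migration keys plus the index separating already-applied keys from pending ones:

    def replaces_sketch(squashed_keys, key_index, applied=False):
        # applied=True  -> migrations already applied (everything before the index)
        # applied=False -> migrations the squash still needs to replace
        if applied:
            return [('main', key) for key in squashed_keys[:key_index]]
        return [('main', key) for key in squashed_keys[key_index:]]

    # e.g. replaces_sketch(['0001_a', '0002_b', '0003_c'], 1)
    #      == [('main', '0002_b'), ('main', '0003_c')]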


def operations(squashed, applied=False):

@@ -42,12 +42,12 @@ SQUASHED_30 = {
migrations.AlterField(
model_name='credential',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='credential',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
),
],
'0033_v303_v245_host_variable_fix': [

@@ -17,24 +17,24 @@ SQUASHED_31 = {
migrations.AlterField(
model_name='project',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
],
'0036_v311_insights': [
migrations.AlterField(
model_name='project',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
],
'0037_v313_instance_version': [

@@ -152,10 +152,6 @@ def o_auth2_token_get_absolute_url(self, request=None):

OAuth2AccessToken.add_to_class('get_absolute_url', o_auth2_token_get_absolute_url)


# Import signal handlers only after models have been defined.
import awx.main.signals # noqa

from awx.main.registrar import activity_stream_registrar # noqa
activity_stream_registrar.connect(Organization)
activity_stream_registrar.connect(Inventory)

@@ -7,6 +7,7 @@ from awx.main.fields import JSONField

# Django
from django.db import models
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _

__all__ = ['ActivityStream']
@@ -84,9 +85,9 @@ class ActivityStream(models.Model):
if self.actor:
self.deleted_actor = {
'id': self.actor_id,
'username': self.actor.username,
'first_name': self.actor.first_name,
'last_name': self.actor.last_name,
'username': smart_str(self.actor.username),
'first_name': smart_str(self.actor.first_name),
'last_name': smart_str(self.actor.last_name),
}
if 'update_fields' in kwargs and 'deleted_actor' not in kwargs['update_fields']:
kwargs['update_fields'].append('deleted_actor')

@@ -3,7 +3,7 @@

# Python
import logging
from urlparse import urljoin
from urllib.parse import urljoin

# Django
from django.conf import settings
@@ -109,7 +109,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
return self.limit

def clean_module_name(self):
if type(self.module_name) not in (str, unicode):
if type(self.module_name) is not str:
raise ValidationError(_("Invalid type for ad hoc command"))
module_name = self.module_name.strip() or 'command'
if module_name not in settings.AD_HOC_COMMANDS:
@@ -117,7 +117,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
return module_name

def clean_module_args(self):
if type(self.module_args) not in (str, unicode):
if type(self.module_args) is not str:
raise ValidationError(_("Invalid type for ad hoc command"))
module_args = self.module_args
if self.module_name in ('command', 'shell') and not module_args:

@@ -92,7 +92,7 @@ class BaseModel(models.Model):
class Meta:
abstract = True

def __unicode__(self):
def __str__(self):
if 'name' in self.__dict__:
return u'%s-%s' % (self.name, self.pk)
else:
@@ -152,7 +152,7 @@ class CreatedModifiedModel(BaseModel):
)

def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', [])
update_fields = list(kwargs.get('update_fields', []))
# Manually perform auto_now_add and auto_now logic.
if not self.pk and not self.created:
self.created = now()

@@ -7,7 +7,6 @@ import os
import re
import stat
import tempfile
import six

# Jinja2
from jinja2 import Template
@@ -33,7 +32,6 @@ from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.utils import encrypt_field
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS
from . import injectors as builtin_injectors

__all__ = ['Credential', 'CredentialType', 'V1Credential', 'build_safe_env']
@@ -164,7 +162,6 @@ class V1Credential(object):
max_length=32,
blank=True,
default='',
choices=CHOICES_PRIVILEGE_ESCALATION_METHODS,
help_text=_('Privilege escalation method.')
),
'become_username': models.CharField(
@@ -325,10 +322,11 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):

@property
def has_encrypted_ssh_key_data(self):
if self.pk:
try:
ssh_key_data = decrypt_field(self, 'ssh_key_data')
else:
ssh_key_data = self.ssh_key_data
except AttributeError:
return False

try:
pem_objects = validate_ssh_private_key(ssh_key_data)
for pem_object in pem_objects:
@@ -383,7 +381,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
super(Credential, self).save(*args, **kwargs)

def encrypt_field(self, field, ask):
if not hasattr(self, field):
if field not in self.inputs:
return None
encrypted = encrypt_field(self, field, ask=ask)
if encrypted:
@@ -415,13 +413,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
type_alias = self.credential_type.name
else:
type_alias = self.credential_type_id
if self.kind == 'vault' and self.inputs.get('vault_id', None):
if self.kind == 'vault' and self.has_input('vault_id'):
if display:
fmt_str = six.text_type('{} (id={})')
fmt_str = '{} (id={})'
else:
fmt_str = six.text_type('{}_{}')
return fmt_str.format(type_alias, self.inputs.get('vault_id'))
return six.text_type(type_alias)
fmt_str = '{}_{}'
return fmt_str.format(type_alias, self.get_input('vault_id'))
return str(type_alias)

@staticmethod
def unique_dict(cred_qs):
@@ -430,6 +428,34 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
ret[cred.unique_hash()] = cred
return ret

def get_input(self, field_name, **kwargs):
"""
Get an injectable and decrypted value for an input field.

Retrieves the value for a given credential input field name. Return
values for secret input fields are decrypted. If the credential doesn't
have an input value defined for the given field name, an AttributeError
is raised unless a default value is provided.

:param field_name(str): The name of the input field.
:param default(optional[str]): A default return value to use.
"""
if field_name in self.credential_type.secret_fields:
try:
return decrypt_field(self, field_name)
except AttributeError:
if 'default' in kwargs:
return kwargs['default']
raise AttributeError
if field_name in self.inputs:
return self.inputs[field_name]
if 'default' in kwargs:
return kwargs['default']
raise AttributeError(field_name)

def has_input(self, field_name):
return field_name in self.inputs and self.inputs[field_name] not in ('', None)
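
The docstring above is the contract that the injector rewrites later in this diff rely on: get_input() returns decrypted values for secret fields and honors a default keyword, while has_input() treats empty strings and None as absent. A minimal usage sketch (the field name and env dict are illustrative, mirroring the aws() injector further down):

    env = {}
    # has_input() is True only for a non-empty stored value
    if cred.has_input('security_token'):
        # secret fields come back decrypted; default suppresses AttributeError
        env['AWS_SECURITY_TOKEN'] = cred.get_input('security_token', default='')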


class CredentialType(CommonModelNameNotUnique):
'''
@@ -511,7 +537,7 @@ class CredentialType(CommonModelNameNotUnique):
if field['id'] == field_id:
if 'choices' in field:
return field['choices'][0]
return {'string': '', 'boolean': False, 'become_method': ''}[field['type']]
return {'string': '', 'boolean': False}[field['type']]

@classmethod
def default(cls, f):
@@ -611,8 +637,9 @@ class CredentialType(CommonModelNameNotUnique):
safe_namespace[field_name] = namespace[field_name] = value
continue

value = credential.get_input(field_name)

if field_name in self.secret_fields:
value = decrypt_field(credential, field_name)
safe_namespace[field_name] = '**********'
elif len(value):
safe_namespace[field_name] = value
@@ -632,7 +659,7 @@ class CredentialType(CommonModelNameNotUnique):
data = Template(file_tmpl).render(**namespace)
_, path = tempfile.mkstemp(dir=private_data_dir)
with open(path, 'w') as f:
f.write(data.encode('utf-8'))
f.write(data)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)

# determine if filename indicates single file or many
@@ -649,9 +676,7 @@ class CredentialType(CommonModelNameNotUnique):
try:
injector_field.validate_env_var_allowed(env_var)
except ValidationError as e:
logger.error(six.text_type(
'Ignoring prohibited env var {}, reason: {}'
).format(env_var, e))
logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e))
continue
env[env_var] = Template(tmpl).render(**namespace)
safe_env[env_var] = Template(tmpl).render(**safe_namespace)
@@ -709,7 +734,7 @@ def ssh(cls):
}, {
'id': 'become_method',
'label': ugettext_noop('Privilege Escalation Method'),
'type': 'become_method',
'type': 'string',
'help_text': ugettext_noop('Specify a method for "become" operations. This is '
'equivalent to specifying the --become-method '
'Ansible parameter.')
@@ -982,7 +1007,7 @@ def cloudforms(cls):
'label': ugettext_noop('CloudForms URL'),
'type': 'string',
'help_text': ugettext_noop('Enter the URL for the virtual machine that '
'corresponds to your CloudForm instance. '
'corresponds to your CloudForms instance. '
'For example, https://cloudforms.example.org')
}, {
'id': 'username',

@@ -3,25 +3,28 @@ import os
import stat
import tempfile

from awx.main.utils import decrypt_field
from django.conf import settings


def aws(cred, env, private_data_dir):
env['AWS_ACCESS_KEY_ID'] = cred.username
env['AWS_SECRET_ACCESS_KEY'] = decrypt_field(cred, 'password')
if len(cred.security_token) > 0:
env['AWS_SECURITY_TOKEN'] = decrypt_field(cred, 'security_token')
env['AWS_ACCESS_KEY_ID'] = cred.get_input('username', default='')
env['AWS_SECRET_ACCESS_KEY'] = cred.get_input('password', default='')

if cred.has_input('security_token'):
env['AWS_SECURITY_TOKEN'] = cred.get_input('security_token', default='')


def gce(cred, env, private_data_dir):
env['GCE_EMAIL'] = cred.username
env['GCE_PROJECT'] = cred.project
project = cred.get_input('project', default='')
username = cred.get_input('username', default='')

env['GCE_EMAIL'] = username
env['GCE_PROJECT'] = project
json_cred = {
'type': 'service_account',
'private_key': decrypt_field(cred, 'ssh_key_data'),
'client_email': cred.username,
'project_id': cred.project
'private_key': cred.get_input('ssh_key_data', default=''),
'client_email': username,
'project_id': project
}
handle, path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')
@@ -32,21 +35,25 @@ def gce(cred, env, private_data_dir):


def azure_rm(cred, env, private_data_dir):
if len(cred.client) and len(cred.tenant):
env['AZURE_CLIENT_ID'] = cred.client
env['AZURE_SECRET'] = decrypt_field(cred, 'secret')
env['AZURE_TENANT'] = cred.tenant
env['AZURE_SUBSCRIPTION_ID'] = cred.subscription
client = cred.get_input('client', default='')
tenant = cred.get_input('tenant', default='')

if len(client) and len(tenant):
env['AZURE_CLIENT_ID'] = client
env['AZURE_TENANT'] = tenant
env['AZURE_SECRET'] = cred.get_input('secret', default='')
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
else:
env['AZURE_SUBSCRIPTION_ID'] = cred.subscription
env['AZURE_AD_USER'] = cred.username
env['AZURE_PASSWORD'] = decrypt_field(cred, 'password')
if cred.inputs.get('cloud_environment', None):
env['AZURE_CLOUD_ENVIRONMENT'] = cred.inputs['cloud_environment']
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
env['AZURE_AD_USER'] = cred.get_input('username', default='')
env['AZURE_PASSWORD'] = cred.get_input('password', default='')

if cred.has_input('cloud_environment'):
env['AZURE_CLOUD_ENVIRONMENT'] = cred.get_input('cloud_environment')


def vmware(cred, env, private_data_dir):
env['VMWARE_USER'] = cred.username
env['VMWARE_PASSWORD'] = decrypt_field(cred, 'password')
env['VMWARE_HOST'] = cred.host
env['VMWARE_USER'] = cred.get_input('username', default='')
env['VMWARE_PASSWORD'] = cred.get_input('password', default='')
env['VMWARE_HOST'] = cred.get_input('host', default='')
env['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)
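
Each of the rewritten injectors above shares the signature f(cred, env, private_data_dir) and mutates env in place; nothing is returned. A sketch of how a caller might apply one (the call site shown is illustrative, not the actual AWX dispatch code):

    env = {}
    vmware(credential, env, '/tmp/private_data')
    # env now holds VMWARE_USER, VMWARE_PASSWORD, VMWARE_HOST, VMWARE_VALIDATE_CERTS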

@@ -9,7 +9,6 @@ from django.utils.text import Truncator
from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
import six

from awx.api.versioning import reverse
from awx.main.fields import JSONField
@@ -27,7 +26,7 @@ __all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent',

def sanitize_event_keys(kwargs, valid_keys):
# Sanity check: Don't honor keys that we don't recognize.
for key in kwargs.keys():
for key in list(kwargs.keys()):
if key not in valid_keys:
kwargs.pop(key)

@@ -35,7 +34,7 @@ def sanitize_event_keys(kwargs, valid_keys):
for key in [
'play', 'role', 'task', 'playbook'
]:
if isinstance(kwargs.get('event_data', {}).get(key), six.string_types):
if isinstance(kwargs.get('event_data', {}).get(key), str):
if len(kwargs['event_data'][key]) > 1024:
kwargs['event_data'][key] = Truncator(kwargs['event_data'][key]).chars(1024)
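
django.utils.text.Truncator, applied above, caps a string at a character budget and appends an ellipsis when it actually truncates. A quick illustration:

    from django.utils.text import Truncator

    s = Truncator('x' * 2000).chars(1024)
    assert len(s) <= 1024  # truncated text ends with an ellipsis character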

@@ -353,9 +352,16 @@ class BasePlaybookEvent(CreatedModifiedModel):
if hasattr(self, 'job') and not from_parent_update:
if getattr(settings, 'CAPTURE_JOB_EVENT_HOSTS', False):
self._update_hosts()
if self.event == 'playbook_on_stats':
self._update_parents_failed_and_changed()
if self.parent_uuid:
kwargs = {}
if self.changed is True:
kwargs['changed'] = True
if self.failed is True:
kwargs['failed'] = True
if kwargs:
JobEvent.objects.filter(job_id=self.job_id, uuid=self.parent_uuid).update(**kwargs)

if self.event == 'playbook_on_stats':
hostnames = self._hostnames()
self._update_host_summary_from_stats(hostnames)
try:
@@ -424,7 +430,7 @@ class JobEvent(BasePlaybookEvent):
def get_absolute_url(self, request=None):
return reverse('api:job_event_detail', kwargs={'pk': self.pk}, request=request)

def __unicode__(self):
def __str__(self):
return u'%s @ %s' % (self.get_event_display2(), self.created.isoformat())

def _update_from_event_data(self):
@@ -436,15 +442,6 @@ class JobEvent(BasePlaybookEvent):
updated_fields.add('host_name')
return updated_fields

def _update_parents_failed_and_changed(self):
# Update parent events to reflect failed, changed
runner_events = JobEvent.objects.filter(job=self.job,
event__startswith='runner_on')
changed_events = runner_events.filter(changed=True)
failed_events = runner_events.filter(failed=True)
JobEvent.objects.filter(uuid__in=changed_events.values_list('parent_uuid', flat=True)).update(changed=True)
JobEvent.objects.filter(uuid__in=failed_events.values_list('parent_uuid', flat=True)).update(failed=True)

def _update_hosts(self, extra_host_pks=None):
# Update job event hosts m2m from host_name, propagate to parent events.
extra_host_pks = set(extra_host_pks or [])
@@ -580,7 +577,7 @@ class BaseCommandEvent(CreatedModifiedModel):
editable=False,
)

def __unicode__(self):
def __str__(self):
return u'%s @ %s' % (self.get_event_display(), self.created.isoformat())

@classmethod

@@ -1,11 +1,9 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import six
import random
from decimal import Decimal

from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator
from django.db import models, connection
from django.db.models.signals import post_save, post_delete
@@ -31,15 +29,6 @@ from awx.main.models.mixins import RelatedJobsMixin
__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',)


def validate_queuename(v):
# kombu doesn't play nice with unicode in queue names
if v:
try:
'{}'.format(v.decode('utf-8'))
except UnicodeEncodeError:
raise ValidationError(_(six.text_type('{} contains unsupported characters')).format(v))


class HasPolicyEditsMixin(HasEditsMixin):

class Meta:
@@ -163,11 +152,6 @@ class Instance(HasPolicyEditsMixin, BaseModel):
self.save(update_fields=['capacity', 'version', 'modified', 'cpu',
'memory', 'cpu_capacity', 'mem_capacity'])

def clean_hostname(self):
validate_queuename(self.hostname)
return self.hostname


class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
"""A model representing a Queue/Group of AWX Instances."""
@@ -234,9 +218,6 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
class Meta:
app_label = 'main'

def clean_name(self):
validate_queuename(self.name)
return self.name

def fit_task_to_most_remaining_capacity_instance(self, task):
instance_most_capacity = None

@@ -3,12 +3,13 @@

# Python
import datetime
import time
import itertools
import logging
import re
import copy
from urlparse import urljoin
import os.path
import six
from urllib.parse import urljoin

# Django
from django.conf import settings
@@ -39,6 +40,7 @@ from awx.main.models.mixins import (
ResourceMixin,
TaskManagerInventoryUpdateMixin,
RelatedJobsMixin,
CustomVirtualEnvMixin,
)
from awx.main.models.notifications import (
NotificationTemplate,
@@ -342,9 +344,13 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_inventory_sources'] = False
# Now apply updates to hosts where needed (in batches).
all_update_pks = hosts_to_update.keys()
for offset in xrange(0, len(all_update_pks), 500):
update_pks = all_update_pks[offset:(offset + 500)]
all_update_pks = list(hosts_to_update.keys())

def _chunk(items, chunk_size):
for i, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size):
yield (g[1] for g in group)

for update_pks in _chunk(all_update_pks, 500):
for host in hosts_qs.filter(pk__in=update_pks):
host_updates = hosts_to_update[host.pk]
for field, value in host_updates.items():
|
||||
@@ -411,12 +417,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
failed_group_pks.add(group_pk)
|
||||
|
||||
# Now apply updates to each group as needed (in batches).
|
||||
all_update_pks = groups_to_update.keys()
|
||||
for offset in xrange(0, len(all_update_pks), 500):
|
||||
all_update_pks = list(groups_to_update.keys())
|
||||
for offset in range(0, len(all_update_pks), 500):
|
||||
update_pks = all_update_pks[offset:(offset + 500)]
|
||||
for group in self.groups.filter(pk__in=update_pks):
|
||||
group_updates = groups_to_update[group.pk]
|
||||
for field, value in group_updates.items():
|
||||
for field, value in list(group_updates.items()):
|
||||
if getattr(group, field) != value:
|
||||
setattr(group, field, value)
|
||||
else:
|
||||
@@ -428,7 +434,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
'''
|
||||
Update model fields that are computed from database relationships.
|
||||
'''
|
||||
logger.debug("Going to update inventory computed fields")
|
||||
logger.debug("Going to update inventory computed fields, pk={0}".format(self.pk))
|
||||
start_time = time.time()
|
||||
if update_hosts:
|
||||
self.update_host_computed_fields()
|
||||
if update_groups:
|
||||
@@ -456,7 +463,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
}
|
||||
# CentOS python seems to have issues clobbering the inventory on poor timing during certain operations
|
||||
iobj = Inventory.objects.get(id=self.id)
|
||||
for field, value in computed_fields.items():
|
||||
for field, value in list(computed_fields.items()):
|
||||
if getattr(iobj, field) != value:
|
||||
setattr(iobj, field, value)
|
||||
# update in-memory object
|
||||
@@ -465,7 +472,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
computed_fields.pop(field)
|
||||
if computed_fields:
|
||||
iobj.save(update_fields=computed_fields.keys())
|
||||
logger.debug("Finished updating inventory computed fields")
|
||||
logger.debug("Finished updating inventory computed fields, pk={0}, in "
|
||||
"{1:.3f} seconds".format(self.pk, time.time() - start_time))
|
||||
|
||||
def websocket_emit_status(self, status):
|
||||
connection.on_commit(lambda: emit_channel_notification(
|
||||
@@ -1347,7 +1355,7 @@ class InventorySourceOptions(BaseModel):
|
||||
source_vars_dict = VarsDictProperty('source_vars')
|
||||
|
||||
def clean_instance_filters(self):
|
||||
instance_filters = six.text_type(self.instance_filters or '')
|
||||
instance_filters = str(self.instance_filters or '')
|
||||
if self.source == 'ec2':
|
||||
invalid_filters = []
|
||||
instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')
|
||||
@@ -1373,7 +1381,7 @@ class InventorySourceOptions(BaseModel):
|
||||
return ''
|
||||
|
||||
def clean_group_by(self):
|
||||
group_by = six.text_type(self.group_by or '')
|
||||
group_by = str(self.group_by or '')
|
||||
if self.source == 'ec2':
|
||||
get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
|
||||
valid_choices = [x[0] for x in get_choices()]
|
||||
@@ -1530,7 +1538,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix
|
||||
if '_eager_fields' not in kwargs:
|
||||
kwargs['_eager_fields'] = {}
|
||||
if 'name' not in kwargs['_eager_fields']:
|
||||
name = six.text_type('{} - {}').format(self.inventory.name, self.name)
|
||||
name = '{} - {}'.format(self.inventory.name, self.name)
|
||||
name_field = self._meta.get_field('name')
|
||||
if len(name) > name_field.max_length:
|
||||
name = name[:name_field.max_length]
|
||||
@@ -1614,7 +1622,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix
|
||||
return InventoryUpdate.objects.filter(inventory_source=self)
|
||||
|
||||
|
||||
class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, TaskManagerInventoryUpdateMixin):
|
||||
class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, TaskManagerInventoryUpdateMixin, CustomVirtualEnvMixin):
|
||||
'''
|
||||
Internal job for tracking inventory updates from external sources.
|
||||
'''
|
||||
@@ -1733,6 +1741,18 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
|
||||
return self.global_instance_groups
|
||||
return selected_groups
|
||||
|
||||
@property
|
||||
def ansible_virtualenv_path(self):
|
||||
if self.inventory_source and self.inventory_source.source_project:
|
||||
project = self.inventory_source.source_project
|
||||
if project and project.custom_virtualenv:
|
||||
return project.custom_virtualenv
|
||||
if self.inventory_source and self.inventory_source.inventory:
|
||||
organization = self.inventory_source.inventory.organization
|
||||
if organization and organization.custom_virtualenv:
|
||||
return organization.custom_virtualenv
|
||||
return settings.ANSIBLE_VENV_PATH
|
||||
|
||||
def cancel(self, job_explanation=None, is_chain=False):
|
||||
res = super(InventoryUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
|
||||
if res:
|
||||
|
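`dict.keys()` returns a non-sliceable view on Python 3, which is why the old `all_update_pks[offset:(offset + 500)]` batching had to become `list(...)` plus the `_chunk` generator above. The same `itertools.groupby` trick in isolation — this standalone `chunk` yields lists rather than generator expressions so the result is printable, but otherwise matches the diff:

```python
import itertools


def chunk(items, chunk_size):
    # enumerate() pairs each item with its index; grouping by
    # index // chunk_size buckets indexes 0..499 together,
    # 500..999 together, and so on.
    for _, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size):
        yield [g[1] for g in group]


print(list(chunk(range(7), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]
```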
```diff
@@ -8,9 +8,8 @@ import logging
 import os
 import time
 import json
-from urlparse import urljoin
+from urllib.parse import urljoin
 
-import six
 
 # Django
 from django.conf import settings
@@ -94,8 +93,7 @@ class JobOptions(BaseModel):
         blank=True,
         default=0,
     )
-    limit = models.CharField(
-        max_length=1024,
+    limit = models.TextField(
         blank=True,
         default='',
     )
@@ -347,8 +345,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
         except JobLaunchConfig.DoesNotExist:
             wj_config = JobLaunchConfig()
         actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory
-        for idx in xrange(min(self.job_slice_count,
-                              actual_inventory.hosts.count())):
+        for idx in range(min(self.job_slice_count,
+                             actual_inventory.hosts.count())):
             create_kwargs = dict(workflow_job=job,
                                  unified_job_template=self,
                                  ancestor_artifacts=dict(job_slice=idx + 1))
@@ -452,7 +450,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
 
     @property
     def cache_timeout_blocked(self):
-        if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() > getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
+        if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
             logger.error("Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
                          (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
             return True
@@ -490,7 +488,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
         return UnifiedJob.objects.filter(unified_job_template=self)
 
 
-class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin):
+class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin):
     '''
     A job applies a project (with playbook) to an inventory source with a given
     credential. It represents a single invocation of ansible-playbook with the
@@ -695,7 +693,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
         count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
         if self.job_slice_count > 1:
             # Integer division intentional
-            count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) / self.job_slice_count
+            count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
         return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
 
     @property
@@ -823,7 +821,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
             timeout = now() - datetime.timedelta(seconds=timeout)
             hosts = hosts.filter(ansible_facts_modified__gte=timeout)
         for host in hosts:
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):
                 system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                 continue
@@ -840,7 +838,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     def finish_job_fact_cache(self, destination, modification_times):
         destination = os.path.join(destination, 'facts')
         for host in self._get_inventory_hosts():
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):
                 system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                 continue
@@ -1120,7 +1118,7 @@ class JobHostSummary(CreatedModifiedModel):
     skipped = models.PositiveIntegerField(default=0, editable=False)
     failed = models.BooleanField(default=False, editable=False)
 
-    def __unicode__(self):
+    def __str__(self):
         host = getattr_dne(self, 'host')
         hostname = host.name if host else 'N/A'
         return '%s changed=%d dark=%d failures=%d ok=%d processed=%d skipped=%s' % \
```
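The `/` → `//` swap above is one of the few silent behavior changes in a 2-to-3 port: `/` between integers is true division on Python 3 and would have turned the host count into a float. Worked illustration with made-up numbers:

```python
count_hosts, job_slice_count, job_slice_number = 5, 2, 2

print((count_hosts + job_slice_count - job_slice_number) / job_slice_count)   # 2.5 (float)
print((count_hosts + job_slice_count - job_slice_number) // job_slice_count)  # 2   (int, as intended)
```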
```diff
@@ -3,7 +3,6 @@ import os
 import json
 from copy import copy, deepcopy
 
-import six
 
 # Django
 from django.apps import apps
@@ -167,7 +166,7 @@ class SurveyJobTemplateMixin(models.Model):
             decrypted_default = default
             if (
                 survey_element['type'] == "password" and
-                isinstance(decrypted_default, six.string_types) and
+                isinstance(decrypted_default, str) and
                 decrypted_default.startswith('$encrypted$')
             ):
                 decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
@@ -190,7 +189,7 @@ class SurveyJobTemplateMixin(models.Model):
             if (survey_element['type'] == "password"):
                 password_value = data.get(survey_element['variable'])
                 if (
-                    isinstance(password_value, six.string_types) and
+                    isinstance(password_value, str) and
                     password_value == '$encrypted$'
                 ):
                     if survey_element.get('default') is None and survey_element['required']:
@@ -203,7 +202,7 @@ class SurveyJobTemplateMixin(models.Model):
                 errors.append("'%s' value missing" % survey_element['variable'])
         elif survey_element['type'] in ["textarea", "text", "password"]:
             if survey_element['variable'] in data:
-                if not isinstance(data[survey_element['variable']], six.string_types):
+                if not isinstance(data[survey_element['variable']], str):
                     errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
                                                                                   survey_element['variable']))
                     return errors
@@ -247,7 +246,7 @@ class SurveyJobTemplateMixin(models.Model):
                 errors.append("'%s' value is expected to be a list." % survey_element['variable'])
             else:
                 choice_list = copy(survey_element['choices'])
-                if isinstance(choice_list, six.string_types):
+                if isinstance(choice_list, str):
                     choice_list = choice_list.split('\n')
                 for val in data[survey_element['variable']]:
                     if val not in choice_list:
@@ -255,7 +254,7 @@ class SurveyJobTemplateMixin(models.Model):
                                       choice_list))
         elif survey_element['type'] == 'multiplechoice':
             choice_list = copy(survey_element['choices'])
-            if isinstance(choice_list, six.string_types):
+            if isinstance(choice_list, str):
                 choice_list = choice_list.split('\n')
             if survey_element['variable'] in data:
                 if data[survey_element['variable']] not in choice_list:
@@ -315,7 +314,7 @@ class SurveyJobTemplateMixin(models.Model):
             if 'prompts' not in _exclude_errors:
                 errors['extra_vars'] = [_('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '+
                                           'on the {model_name} to include Extra Variables.').format(
-                    list_of_keys=six.text_type(', ').join([six.text_type(key) for key in extra_vars.keys()]),
+                    list_of_keys=', '.join([str(key) for key in extra_vars.keys()]),
                     model_name=self._meta.verbose_name.title())]
 
         return (accepted, rejected, errors)
@@ -386,7 +385,7 @@ class SurveyJobMixin(models.Model):
             extra_vars = json.loads(self.extra_vars)
             for key in self.survey_passwords:
                 value = extra_vars.get(key)
-                if value and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+                if value and isinstance(value, str) and value.startswith('$encrypted$'):
                     extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
             return json.dumps(extra_vars)
         else:
```
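Every `six.string_types` → `str` substitution in this mixin leans on the same equivalence: `six.string_types` is `(str, unicode)` on Python 2 but exactly `(str,)` on Python 3, so once Python 2 support is dropped the builtin suffices. Sketch:

```python
# What six.string_types reduces to on Python 3:
string_types = (str,)

print(isinstance('$encrypted$', string_types))   # True
print(isinstance(b'$encrypted$', string_types))  # False: bytes are not text
```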
```diff
@@ -20,6 +20,7 @@ from awx.main.notifications.pagerduty_backend import PagerDutyBackend
 from awx.main.notifications.hipchat_backend import HipChatBackend
 from awx.main.notifications.webhook_backend import WebhookBackend
 from awx.main.notifications.mattermost_backend import MattermostBackend
+from awx.main.notifications.grafana_backend import GrafanaBackend
 from awx.main.notifications.rocketchat_backend import RocketChatBackend
 from awx.main.notifications.irc_backend import IrcBackend
 from awx.main.fields import JSONField
@@ -36,6 +37,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
         ('slack', _('Slack'), SlackBackend),
         ('twilio', _('Twilio'), TwilioBackend),
         ('pagerduty', _('Pagerduty'), PagerDutyBackend),
+        ('grafana', _('Grafana'), GrafanaBackend),
         ('hipchat', _('HipChat'), HipChatBackend),
         ('webhook', _('Webhook'), WebhookBackend),
         ('mattermost', _('Mattermost'), MattermostBackend),
@@ -82,7 +84,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
                     setattr(self, '_saved_{}_{}'.format("config", field), value)
                     self.notification_configuration[field] = ''
                 else:
-                    encrypted = encrypt_field(self, 'notification_configuration', subfield=field, skip_utf8=True)
+                    encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                     self.notification_configuration[field] = encrypted
                 if 'notification_configuration' not in update_fields:
                     update_fields.append('notification_configuration')
```
```diff
@@ -4,7 +4,7 @@
 # Python
 import datetime
 import os
-import urlparse
+import urllib.parse as urlparse
 
 # Django
 from django.conf import settings
@@ -15,7 +15,6 @@ from django.utils.text import slugify
 from django.core.exceptions import ValidationError
 from django.utils.timezone import now, make_aware, get_default_timezone
 
-import six
 
 # AWX
 from awx.api.versioning import reverse
@@ -68,7 +67,7 @@ class ProjectOptions(models.Model):
     @classmethod
     def get_local_path_choices(cls):
         if os.path.exists(settings.PROJECTS_ROOT):
-            paths = [x.decode('utf-8') for x in os.listdir(settings.PROJECTS_ROOT)
+            paths = [x for x in os.listdir(settings.PROJECTS_ROOT)
                      if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and
                          not x.startswith('.') and not x.startswith('_'))]
             qs = Project.objects
@@ -134,7 +133,7 @@ class ProjectOptions(models.Model):
     def clean_scm_url(self):
         if self.scm_type == 'insights':
             self.scm_url = settings.INSIGHTS_URL_BASE
-        scm_url = six.text_type(self.scm_url or '')
+        scm_url = str(self.scm_url or '')
         if not self.scm_type:
             return ''
         try:
@@ -145,7 +144,7 @@ class ProjectOptions(models.Model):
         scm_url_parts = urlparse.urlsplit(scm_url)
         if self.scm_type and not any(scm_url_parts):
             raise ValidationError(_('SCM URL is required.'))
-        return six.text_type(self.scm_url or '')
+        return str(self.scm_url or '')
 
     def clean_credential(self):
         if not self.scm_type:
@@ -166,8 +165,8 @@ class ProjectOptions(models.Model):
                                            check_special_cases=False)
             scm_url_parts = urlparse.urlsplit(scm_url)
             # Prefer the username/password in the URL, if provided.
-            scm_username = scm_url_parts.username or cred.username or ''
-            if scm_url_parts.password or cred.password:
+            scm_username = scm_url_parts.username or cred.get_input('username', default='')
+            if scm_url_parts.password or cred.has_input('password'):
                 scm_password = '********'
             else:
                 scm_password = ''
@@ -329,7 +328,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
         skip_update = bool(kwargs.pop('skip_update', False))
         # Create auto-generated local path if project uses SCM.
         if self.pk and self.scm_type and not self.local_path.startswith('_'):
-            slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
+            slug_name = slugify(str(self.name)).replace(u'-', u'_')
             self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
             if 'local_path' not in update_fields:
                 update_fields.append('local_path')
@@ -544,8 +543,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
         res = super(ProjectUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
         if res and self.launch_type != 'sync':
             for inv_src in self.scm_inventory_updates.filter(status='running'):
-                inv_src.cancel(job_explanation=six.text_type(
-                    'Source project update `{}` was canceled.').format(self.name))
+                inv_src.cancel(job_explanation='Source project update `{}` was canceled.'.format(self.name))
         return res
 
     '''
```
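Note the aliasing trick in the first hunk of this file: `import urllib.parse as urlparse` binds the Python 3 module under its old Python 2 name, so every existing `urlparse.urlsplit(...)` call site keeps working unchanged. Standalone sketch:

```python
import urllib.parse as urlparse  # Python 3 module, Python 2 name

parts = urlparse.urlsplit('https://git.example.com/ansible/awx.git')
print(parts.scheme, parts.netloc, parts.path)
# -> https git.example.com /ansible/awx.git
```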
```diff
@@ -155,7 +155,7 @@ class Role(models.Model):
     object_id = models.PositiveIntegerField(null=True, default=None)
     content_object = GenericForeignKey('content_type', 'object_id')
 
-    def __unicode__(self):
+    def __str__(self):
         if 'role_field' in self.__dict__:
             return u'%s-%s' % (self.name, self.pk)
         else:
@@ -204,7 +204,7 @@ class Role(models.Model):
         value = description.get('default')
 
         if '%s' in value and content_type:
-            value = value % model_name
+            value = value % model_name
 
         return value
 
@@ -315,7 +315,7 @@ class Role(models.Model):
     # minus 4k of padding for the other parts of the query, leads us
     # to the magic number of 41496, or 40000 for a nice round number
     def split_ids_for_sqlite(role_ids):
-        for i in xrange(0, len(role_ids), 40000):
+        for i in range(0, len(role_ids), 40000):
            yield role_ids[i:i + 40000]
 
 
```
```diff
@@ -209,7 +209,7 @@ class Schedule(CommonModel, LaunchTimeConfig):
                 pass
         return x
 
-    def __unicode__(self):
+    def __str__(self):
         return u'%s_t%s_%s_%s' % (self.name, self.unified_job_template.id, self.id, self.next_run)
 
     def get_absolute_url(self, request=None):
```
```diff
@@ -2,7 +2,8 @@
 # All Rights Reserved.
 
 # Python
-from StringIO import StringIO
+from io import StringIO
+import codecs
 import json
 import logging
 import os
@@ -11,7 +12,6 @@ import socket
 import subprocess
 import tempfile
 from collections import OrderedDict
-import six
 
 # Django
 from django.conf import settings
@@ -350,10 +350,11 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
         validated_kwargs = kwargs.copy()
         if unallowed_fields:
             if parent_field_name is None:
-                logger.warn(six.text_type('Fields {} are not allowed as overrides to spawn from {}.').format(
-                    six.text_type(', ').join(unallowed_fields), self
+                logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(
+                    ', '.join(unallowed_fields), self
                 ))
-            map(validated_kwargs.pop, unallowed_fields)
+            for f in unallowed_fields:
+                validated_kwargs.pop(f)
 
         unified_job = copy_model_by_class(self, unified_job_class, fields, validated_kwargs)
@@ -735,7 +736,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
     def _resources_sufficient_for_launch(self):
         return True
 
-    def __unicode__(self):
+    def __str__(self):
         return u'%s-%s-%s' % (self.created, self.id, self.status)
 
     @property
@@ -900,7 +901,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         parent = getattr(self, self._get_parent_field_name())
         if parent is None:
             return
-        valid_fields = parent.get_ask_mapping().keys()
+        valid_fields = list(parent.get_ask_mapping().keys())
         # Special cases allowed for workflows
         if hasattr(self, 'extra_vars'):
             valid_fields.extend(['survey_passwords', 'extra_vars'])
@@ -991,9 +992,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
             if not os.path.exists(settings.JOBOUTPUT_ROOT):
                 os.makedirs(settings.JOBOUTPUT_ROOT)
             fd = tempfile.NamedTemporaryFile(
+                mode='w',
                 prefix='{}-{}-'.format(self.model_to_str(), self.pk),
                 suffix='.out',
-                dir=settings.JOBOUTPUT_ROOT
+                dir=settings.JOBOUTPUT_ROOT,
+                encoding='utf-8'
             )
 
             # Before the addition of event-based stdout, older versions of
@@ -1008,7 +1011,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                 fd.write(legacy_stdout_text)
             if hasattr(fd, 'name'):
                 fd.flush()
-                return open(fd.name, 'r')
+                return codecs.open(fd.name, 'r', encoding='utf-8')
             else:
                 # we just wrote to this StringIO, so rewind it
                 fd.seek(0)
@@ -1030,10 +1033,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
             # don't bother actually fetching the data
             total = self.get_event_queryset().aggregate(
                 total=models.Sum(models.Func(models.F('stdout'), function='LENGTH'))
-            )['total']
+            )['total'] or 0
             if total > max_supported:
                 raise StdoutMaxBytesExceeded(total, max_supported)
 
+            # psycopg2's copy_expert writes bytes, but callers of this
+            # function assume a str-based fd will be returned; decode
+            # .write() calls on the fly to maintain this interface
+            _write = fd.write
+            fd.write = lambda s: _write(smart_text(s))
+
             cursor.copy_expert(
                 "copy (select stdout from {} where {}={} order by start_line) to stdout".format(
                     self._meta.db_table + 'event',
@@ -1048,7 +1057,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
             # up escaped line sequences
             fd.flush()
             subprocess.Popen("sed -i 's/\\\\r\\\\n/\\n/g' {}".format(fd.name), shell=True).wait()
-            return open(fd.name, 'r')
+            return codecs.open(fd.name, 'r', encoding='utf-8')
         else:
             # If we're dealing with an in-memory string buffer, use
            # string.replace()
```
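The repeated `open(...)` → `codecs.open(..., encoding='utf-8')` changes, along with the new `mode='w'` / `encoding='utf-8'` arguments to `NamedTemporaryFile`, pin the text codec explicitly instead of trusting the locale default, which can be ASCII on a badly configured host. A minimal sketch of the round trip:

```python
import codecs
import tempfile

# Write text with an explicit codec rather than the locale default.
with tempfile.NamedTemporaryFile(mode='w', suffix='.out', encoding='utf-8', delete=False) as fd:
    fd.write(u'ok: [host-\u00fc]\n')
    name = fd.name

# Reopen the same file for reading, pinning the codec again.
with codecs.open(name, 'r', encoding='utf-8') as f:
    print(f.read(), end='')
```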
```diff
@@ -1063,7 +1072,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
             return content
 
     def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False):
-        content = self.result_stdout_raw_handle().read().decode('utf-8')
+        content = self.result_stdout_raw_handle().read()
         if redact_sensitive:
             content = UriCleaner.remove_sensitive(content)
         if escape_ascii:
@@ -1096,7 +1105,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         else:
             end_actual = len(stdout_lines)
 
-        return_buffer = return_buffer.getvalue().decode('utf-8')
+        return_buffer = return_buffer.getvalue()
         if redact_sensitive:
             return_buffer = UriCleaner.remove_sensitive(return_buffer)
         if escape_ascii:
@@ -1295,9 +1304,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                 'dispatcher', self.execution_node
             ).running(timeout=timeout)
         except socket.timeout:
-            logger.error(six.text_type(
-                'could not reach dispatcher on {} within {}s'
-            ).format(self.execution_node, timeout))
+            logger.error('could not reach dispatcher on {} within {}s'.format(
+                self.execution_node, timeout
+            ))
             running = False
         return running
 
@@ -1314,7 +1323,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
     def cancel(self, job_explanation=None, is_chain=False):
         if self.can_cancel:
             if not is_chain:
-                map(lambda x: x.cancel(job_explanation=self._build_job_explanation(), is_chain=True), self.get_jobs_fail_chain())
+                for x in self.get_jobs_fail_chain():
+                    x.cancel(job_explanation=self._build_job_explanation(), is_chain=True)
 
             if not self.cancel_flag:
                 self.cancel_flag = True
@@ -1363,14 +1373,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
 
         created_by = getattr_dne(self, 'created_by')
 
-        if not created_by:
-            wj = self.get_workflow_job()
-            if wj:
-                for name in ('awx', 'tower'):
-                    r['{}_workflow_job_id'.format(name)] = wj.pk
-                    r['{}_workflow_job_name'.format(name)] = wj.name
-                created_by = getattr_dne(wj, 'created_by')
+        wj = self.get_workflow_job()
+        if wj:
+            for name in ('awx', 'tower'):
+                r['{}_workflow_job_id'.format(name)] = wj.pk
+                r['{}_workflow_job_name'.format(name)] = wj.name
 
         if not created_by:
             schedule = getattr_dne(self, 'schedule')
             if schedule:
                 for name in ('awx', 'tower'):
```
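The two `map(...)` removals in this file fix a classic Python 3 trap: `map` became lazy, so calling it purely for its side effects (as `map(validated_kwargs.pop, unallowed_fields)` did) builds an iterator that is never consumed and pops nothing, whereas Python 2 executed it eagerly. Demonstration:

```python
kwargs = {'a': 1, 'b': 2, 'c': 3}
unallowed = ['a', 'b']

map(kwargs.pop, unallowed)   # lazy on Python 3: no pop ever runs
print(kwargs)                # {'a': 1, 'b': 2, 'c': 3}

for f in unallowed:          # the explicit loop actually executes
    kwargs.pop(f)
print(kwargs)                # {'c': 3}
```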
```diff
@@ -2,7 +2,6 @@
 # All Rights Reserved.
 
 # Python
-#import urlparse
 import logging
 
 # Django
@@ -37,7 +36,7 @@ from awx.main.redact import REPLACE_STR
 from awx.main.fields import JSONField
 
 from copy import copy
-from urlparse import urljoin
+from urllib.parse import urljoin
 
 __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',]
 
@@ -277,6 +276,8 @@ class WorkflowJobNode(WorkflowNodeBase):
             data['extra_vars'] = extra_vars
         # ensure that unified jobs created by WorkflowJobs are marked
         data['_eager_fields'] = {'launch_type': 'workflow'}
+        if self.workflow_job and self.workflow_job.created_by:
+            data['_eager_fields']['created_by'] = self.workflow_job.created_by
         # Extra processing in the case that this is a slice job
         if 'job_slice' in self.ancestor_artifacts and is_root_node:
             data['_eager_fields']['allow_simultaneous'] = True
@@ -406,7 +407,11 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
 
     @property
     def cache_timeout_blocked(self):
-        # TODO: don't allow running of job template if same workflow template running
+        if WorkflowJob.objects.filter(workflow_job_template=self,
+                                      status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
+            logger.error("Workflow Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
+                         (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
+            return True
         return False
 
     @property
```
Some files were not shown because too many files have changed in this diff.