Mirror of https://github.com/ansible/awx.git (synced 2026-02-07 12:34:43 -03:30)
Compare commits
299 Commits
@@ -1,2 +1 @@
.git
awx/ui/node_modules

.gitignore (3 changes, vendored)
@@ -33,6 +33,7 @@ awx/ui_next/src/locales/
awx/ui_next/coverage/
awx/ui_next/build
awx/ui_next/.env.local
awx/ui_next/instrumented
rsyslog.pid
tools/prometheus/data
tools/docker-compose/Dockerfile
@@ -146,3 +147,5 @@ use_dev_supervisor.txt
*.unison.tmp
*.#
/tools/docker-compose/overrides/
/awx/ui_next/.ui-built
/Dockerfile
CHANGELOG.md (32 changes)
@@ -2,6 +2,38 @@

This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.

# 17.1.0 (March 9th, 2021)
- Addressed a security issue in AWX (CVE-2021-20253)
- Fixed a permissions error related to Redis in K8S-based deployments: https://github.com/ansible/awx/issues/9401

# 17.0.1 (January 26, 2021)
- Fixed pgdocker directory permissions issue with Local Docker installer: https://github.com/ansible/awx/pull/9152
- Fixed a bug in the UI which caused toggle settings to not be changed when clicked: https://github.com/ansible/awx/pull/9093

# 17.0.0 (January 22, 2021)
- AWX now requires PostgreSQL 12 by default: https://github.com/ansible/awx/pull/8943
  **Note:** users who encounter permissions errors at upgrade time should `chown -R ~/.awx/pgdocker` to ensure it's owned by the user running the install playbook
- Added support for region name for OpenStack inventory: https://github.com/ansible/awx/issues/5080
- Added the ability to chain undefined attributes in custom notification templates: https://github.com/ansible/awx/issues/8677
- Dramatically simplified the `image_build` role: https://github.com/ansible/awx/pull/8980
- Fixed a bug which can cause schema migrations to fail at install time: https://github.com/ansible/awx/issues/9077
- Fixed a bug which caused the `is_superuser` user property to be out of date in certain circumstances: https://github.com/ansible/awx/pull/8833
- Fixed a bug which sometimes results in race conditions on setting access: https://github.com/ansible/awx/pull/8580
- Fixed a bug which sometimes causes an unexpected delay in stdout for some playbooks: https://github.com/ansible/awx/issues/9085
- (UI) Added support for credential password prompting on job launch: https://github.com/ansible/awx/pull/9028
- (UI) Added the ability to configure LDAP settings in the UI: https://github.com/ansible/awx/issues/8291
- (UI) Added a sync button to the Project detail view: https://github.com/ansible/awx/issues/8847
- (UI) Added a form for configuring Google OAuth 2.0 settings: https://github.com/ansible/awx/pull/8762
- (UI) Added searchable keys and related keys to the Credentials list: https://github.com/ansible/awx/issues/8603
- (UI) Added support for advanced search and copying to Notification Templates: https://github.com/ansible/awx/issues/7879
- (UI) Added support for prompting on workflow nodes: https://github.com/ansible/awx/issues/5913
- (UI) Added support for session timeouts: https://github.com/ansible/awx/pull/8250
- (UI) Fixed a bug that broke websocket streaming for the insecure ws:// protocol: https://github.com/ansible/awx/pull/8877
- (UI) Fixed a bug in the user interface when a translation for the browser's preferred locale isn't available: https://github.com/ansible/awx/issues/8884
- (UI) Fixed a bug where navigating from one survey question form directly to another wasn't reloading the form: https://github.com/ansible/awx/issues/7522
- (UI) Fixed a bug which can cause an uncaught error while launching a Job Template: https://github.com/ansible/awx/issues/8936
- Updated autobahn to address CVE-2020-35678

## 16.0.0 (December 10, 2020)
- AWX now ships with a reimagined user interface. **Please read this before upgrading:** https://groups.google.com/g/awx-project/c/KuT5Ao92HWo
- Removed support for syncing inventory from Red Hat CloudForms - https://github.com/ansible/awx/commit/0b701b3b2
@@ -85,7 +85,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo

#### Frontend Development

See [the ui development documentation](awx/ui/README.md).
See [the ui development documentation](awx/ui_next/CONTRIBUTING.md).

### Build the environment

@@ -158,7 +158,7 @@ $ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
44251b476f98 gcr.io/ansible-tower-engineering/awx_devel:devel "/entrypoint.sh /bin…" 27 seconds ago Up 23 seconds 0.0.0.0:6899->6899/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 0.0.0.0:8080->8080/tcp, 22/tcp, 0.0.0.0:8888->8888/tcp tools_awx_run_9e820694d57e
40de380e3c2e redis:latest "docker-entrypoint.s…" 28 seconds ago Up 26 seconds
b66a506d3007 postgres:10 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1
b66a506d3007 postgres:12 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1
```
**NOTE**
@@ -60,7 +60,7 @@ Please note that deploying from `HEAD` (or the latest commit) is **not** stable,

For more on how to clone the repo, view [git clone help](https://git-scm.com/docs/git-clone).

Once you have a local copy, run commands within the root of the project tree.
Once you have a local copy, run the commands in the following sections from the root of the project tree.

### AWX branding

@@ -497,7 +497,7 @@ Before starting the install process, review the [inventory](./installer/inventor

*docker_compose_dir*

> When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
> When using docker-compose, the `docker-compose.yml` file will be created there (default `~/.awx/awxcompose`).

*custom_venv_dir*
Makefile (37 changes)
@@ -19,7 +19,8 @@ PYCURL_SSL_LIBRARY ?= openssl
COMPOSE_TAG ?= $(GIT_BRANCH)
COMPOSE_HOST ?= $(shell hostname)

VENV_BASE ?= /venv
VENV_BASE ?= /var/lib/awx/venv/
COLLECTION_BASE ?= /var/lib/awx/vendor/awx_ansible_collections
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

@@ -270,7 +271,7 @@ uwsgi: collectstatic
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"
	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/var/lib/awx/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"

daphne:
	@if [ "$(VENV_BASE)" ]; then \

@@ -340,7 +341,7 @@ check: flake8 pep8 # pyflakes pylint

awx-link:
	[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
	cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link

TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -462,19 +463,24 @@ endif

# UI TASKS
# --------------------------------------
awx/ui_next/node_modules:
	$(NPM_BIN) --prefix awx/ui_next install

UI_BUILD_FLAG_FILE = awx/ui_next/.ui-built

clean-ui:
	rm -rf node_modules
	rm -rf awx/ui_next/node_modules
	rm -rf awx/ui_next/build
	rm -rf awx/ui_next/src/locales/_build
	rm -rf $(UI_BUILD_FLAG_FILE)
	git checkout awx/ui_next/src/locales

ui-release: ui-devel
ui-devel: awx/ui_next/node_modules
	$(NPM_BIN) --prefix awx/ui_next run extract-strings
	$(NPM_BIN) --prefix awx/ui_next run compile-strings
	$(NPM_BIN) --prefix awx/ui_next run build
awx/ui_next/node_modules:
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn --ignore-scripts install

$(UI_BUILD_FLAG_FILE):
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run build
	git checkout awx/ui_next/src/locales
	mkdir -p awx/public/static/css
	mkdir -p awx/public/static/js
@@ -482,6 +488,12 @@ ui-devel: awx/ui_next/node_modules
	cp -r awx/ui_next/build/static/css/* awx/public/static/css
	cp -r awx/ui_next/build/static/js/* awx/public/static/js
	cp -r awx/ui_next/build/static/media/* awx/public/static/media
	touch $@

ui-release: awx/ui_next/node_modules $(UI_BUILD_FLAG_FILE)

ui-devel: awx/ui_next/node_modules
	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)

ui-zuul-lint-and-test:
	$(NPM_BIN) --prefix awx/ui_next install

@@ -607,7 +619,10 @@ clean-elk:
	docker rm tools_kibana_1

psql-container:
	docker run -it --net tools_default --rm postgres:10 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
	@echo "awx: $(VERSION)"

Dockerfile: installer/roles/image_build/templates/Dockerfile.j2
	ansible localhost -m template -a "src=installer/roles/image_build/templates/Dockerfile.j2 dest=Dockerfile"
README.md (15 changes)
@@ -1,7 +1,5 @@
[](https://ansible.softwarefactory-project.io/zuul/status)

<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />

AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is the upstream project for [Tower](https://www.ansible.com/tower), a commercial derivative of AWX.

To install AWX, please view the [Install guide](./INSTALL.md).

@@ -16,20 +14,20 @@ Contributing
------------

- Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
- All code submissions are done through pull requests against the `devel` branch.
- All contributors must use git commit --signoff for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- All code submissions are made through pull requests against the `devel` branch.
- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.

Reporting Issues
----------------

If you're experiencing a problem that you feel is a bug in AWX, or have ideas for how to improve AWX, we encourage you to open an issue, and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).
If you're experiencing a problem that you feel is a bug in AWX or have ideas for improving AWX, we encourage you to open an issue and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).

Code of Conduct
---------------

We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

Get Involved
------------

@@ -43,4 +41,3 @@ License
-------

[Apache v2](./LICENSE.md)
@@ -13,6 +13,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from awx.main.models import (
|
||||
ActivityStream,
|
||||
Inventory,
|
||||
Host,
|
||||
Project,
|
||||
JobTemplate,
|
||||
WorkflowJobTemplate,
|
||||
@@ -98,6 +99,7 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI
|
||||
organization__id=org_id).count()
|
||||
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['hosts'] = Host.objects.org_active_count(org_id)
|
||||
|
||||
full_context['related_field_counts'] = {}
|
||||
full_context['related_field_counts'][org_id] = org_counts
|
||||
|
||||
@@ -4,6 +4,7 @@ import logging
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import os
|
||||
|
||||
# Django
|
||||
from django.conf import LazySettings
|
||||
@@ -247,6 +248,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
# These values have to be stored via self.__dict__ in this way to get
|
||||
# around the magic __setattr__ method on this class (which is used to
|
||||
# store API-assigned settings in the database).
|
||||
self.__dict__['__forks__'] = {}
|
||||
self.__dict__['default_settings'] = default_settings
|
||||
self.__dict__['_awx_conf_settings'] = self
|
||||
self.__dict__['_awx_conf_preload_expires'] = None
|
||||
@@ -255,6 +257,26 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
|
||||
self.__dict__['registry'] = registry
|
||||
|
||||
# record the current pid so we compare it post-fork for
|
||||
# processes like the dispatcher and callback receiver
|
||||
self.__dict__['pid'] = os.getpid()
|
||||
|
||||
def __clean_on_fork__(self):
|
||||
pid = os.getpid()
|
||||
# if the current pid does *not* match the value on self, it means
|
||||
# that value was copied on fork, and we're now in a *forked* process;
|
||||
# the *first* time we enter this code path (on setting access),
|
||||
# forcibly close DB/cache sockets and set a marker so we don't run
|
||||
# this code again _in this process_
|
||||
#
|
||||
if pid != self.__dict__['pid'] and pid not in self.__dict__['__forks__']:
|
||||
self.__dict__['__forks__'][pid] = True
|
||||
# It's important to close these post-fork, because we
|
||||
# don't want the forked processes to inherit the open sockets
|
||||
# for the DB and cache connections (that way lies race conditions)
|
||||
connection.close()
|
||||
django_cache.close()
|
||||
|
||||
@cached_property
|
||||
def all_supported_settings(self):
|
||||
return self.registry.get_registered_settings()
|
||||
@@ -330,6 +352,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)
|
||||
|
||||
def _get_local(self, name, validate=True):
|
||||
self.__clean_on_fork__()
|
||||
self._preload_cache()
|
||||
cache_key = Setting.get_cache_key(name)
|
||||
try:
|
||||
|
||||
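The change above records the PID of the process that constructed the settings wrapper and, on the first setting access inside a forked worker, closes the database and cache sockets inherited from the parent. A minimal standalone sketch of that PID-tracking pattern follows; it uses plain callbacks instead of Django's `connection` and cache objects, and every name in it is illustrative rather than taken from AWX.

```python
# Minimal sketch (not AWX's implementation): detect that we are running in a
# forked child and run one-time cleanup, mirroring __clean_on_fork__ above.
import os


class ForkAwareResource:
    def __init__(self, close_callbacks):
        # PID of the process that created this object (the parent).
        self._creator_pid = os.getpid()
        # PIDs of forked children that have already been cleaned up.
        self._cleaned_forks = set()
        # Callables that close inherited handles (e.g. DB and cache sockets).
        self._close_callbacks = list(close_callbacks)

    def clean_on_fork(self):
        pid = os.getpid()
        # A PID different from the creator's means this object was copied into
        # a forked child; close inherited handles exactly once per child.
        if pid != self._creator_pid and pid not in self._cleaned_forks:
            self._cleaned_forks.add(pid)
            for close in self._close_callbacks:
                close()

    def access(self):
        # Every access first drops stale parent connections, just as
        # _get_local() calls __clean_on_fork__() in the hunk above.
        self.clean_on_fork()
        return "value"


if __name__ == '__main__':
    resource = ForkAwareResource([lambda: print(f"closing inherited handles in pid {os.getpid()}")])
    if os.fork() == 0:   # child: the first access triggers the cleanup
        resource.access()
        os._exit(0)
    os.wait()            # parent: no cleanup message is printed here
    resource.access()
```

Closing shared sockets in the child keeps two processes from interleaving traffic on one connection, which is the race condition the comment in the diff refers to.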
@@ -3354,6 +3354,15 @@ msgid ""
|
||||
"common scenarios."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:824
|
||||
msgid "Region Name"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:826
|
||||
msgid ""
|
||||
"For some cloud providers, like OVH, region must be specified."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:824
|
||||
#: awx/main/models/credential/__init__.py:1131
|
||||
#: awx/main/models/credential/__init__.py:1166
|
||||
|
||||
@@ -3354,6 +3354,15 @@ msgid ""
|
||||
"common scenarios."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:824
|
||||
msgid "Region Name"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:826
|
||||
msgid ""
|
||||
"For some cloud providers, like OVH, region must be specified."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/models/credential/__init__.py:824
|
||||
#: awx/main/models/credential/__init__.py:1131
|
||||
#: awx/main/models/credential/__init__.py:1166
|
||||
|
||||
@@ -3294,6 +3294,16 @@ msgid ""
|
||||
"common scenarios."
|
||||
msgstr "Les domaines OpenStack définissent les limites administratives. Ils sont nécessaires uniquement pour les URL d’authentification Keystone v3. Voir la documentation Ansible Tower pour les scénarios courants."
|
||||
|
||||
#: awx/main/models/credential/__init__.py:824
|
||||
msgid "Region Name"
|
||||
msgstr "Nom de la region"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:826
|
||||
msgid ""
|
||||
"For some cloud providers, like OVH, region must be specified."
|
||||
msgstr ""
|
||||
"Chez certains fournisseurs, comme OVH, vous devez spécifier le nom de la région"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:812
|
||||
#: awx/main/models/credential/__init__.py:1110
|
||||
#: awx/main/models/credential/__init__.py:1144
|
||||
|
||||
@@ -75,7 +75,7 @@ class WebsocketSecretAuthHelper:
|
||||
nonce_diff = now - nonce_parsed
|
||||
if abs(nonce_diff) > nonce_tolerance:
|
||||
logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
|
||||
raise ValueError("Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
|
||||
raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
|
||||
MAX_RETRIES = 2
|
||||
last_stats = time.time()
|
||||
last_flush = time.time()
|
||||
total = 0
|
||||
last_event = ''
|
||||
prof = None
|
||||
@@ -52,7 +53,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
|
||||
def read(self, queue):
|
||||
try:
|
||||
res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=settings.JOB_EVENT_BUFFER_SECONDS)
|
||||
res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=1)
|
||||
if res is None:
|
||||
return {'event': 'FLUSH'}
|
||||
self.total += 1
|
||||
@@ -102,6 +103,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
now = tz_now()
|
||||
if (
|
||||
force or
|
||||
(time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or
|
||||
any([len(events) >= 1000 for events in self.buff.values()])
|
||||
):
|
||||
for cls, events in self.buff.items():
|
||||
@@ -124,6 +126,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
for e in events:
|
||||
emit_event_detail(e)
|
||||
self.buff = {}
|
||||
self.last_flush = time.time()
|
||||
|
||||
def perform_work(self, body):
|
||||
try:
|
||||
|
||||
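The callback receiver hunk above buffers events and only writes them out when a flush is forced, when the buffer is older than `JOB_EVENT_BUFFER_SECONDS`, or when any event class has accumulated roughly a thousand rows. Below is a small self-contained sketch of that flush policy, with the Django bulk insert replaced by a print; the class and constant names are placeholders.

```python
# Sketch of the flush policy used by the callback receiver above.
import time
from collections import defaultdict

BUFFER_SECONDS = 0.1        # analogous to settings.JOB_EVENT_BUFFER_SECONDS
MAX_EVENTS_PER_CLASS = 1000


class EventBuffer:
    def __init__(self):
        self.buff = defaultdict(list)   # event class name -> pending events
        self.last_flush = time.time()

    def add(self, cls_name, event):
        self.buff[cls_name].append(event)

    def flush(self, force=False):
        if (
            force
            or (time.time() - self.last_flush) > BUFFER_SECONDS
            or any(len(events) >= MAX_EVENTS_PER_CLASS for events in self.buff.values())
        ):
            for cls_name, events in self.buff.items():
                # In AWX this is a bulk_create() plus websocket emission;
                # here we only report what would be written in one batch.
                print(f"writing {len(events)} {cls_name} events in one batch")
            self.buff = defaultdict(list)
            self.last_flush = time.time()


buf = EventBuffer()
for i in range(5):
    buf.add('JobEvent', {'counter': i})
buf.flush(force=True)   # a timeout on the queue read maps to a forced FLUSH event
```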
@@ -7,6 +7,7 @@ import tempfile
|
||||
import time
|
||||
import logging
|
||||
import yaml
|
||||
import datetime
|
||||
|
||||
from django.conf import settings
|
||||
import ansible_runner
|
||||
@@ -123,6 +124,7 @@ class IsolatedManager(object):
|
||||
dir=private_data_dir
|
||||
)
|
||||
params = self.runner_params.copy()
|
||||
params.get('envvars', dict())['ANSIBLE_CALLBACK_WHITELIST'] = 'profile_tasks'
|
||||
params['playbook'] = playbook
|
||||
params['private_data_dir'] = iso_dir
|
||||
if idle_timeout:
|
||||
@@ -167,8 +169,9 @@ class IsolatedManager(object):
|
||||
|
||||
extravars = {
|
||||
'src': self.private_data_dir,
|
||||
'dest': settings.AWX_PROOT_BASE_PATH,
|
||||
'ident': self.ident
|
||||
'dest': os.path.split(self.private_data_dir)[0],
|
||||
'ident': self.ident,
|
||||
'job_id': self.instance.id,
|
||||
}
|
||||
if playbook:
|
||||
extravars['playbook'] = playbook
|
||||
@@ -204,7 +207,10 @@ class IsolatedManager(object):
|
||||
:param interval: an interval (in seconds) to wait between status polls
|
||||
"""
|
||||
interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
|
||||
extravars = {'src': self.private_data_dir}
|
||||
extravars = {
|
||||
'src': self.private_data_dir,
|
||||
'job_id': self.instance.id
|
||||
}
|
||||
status = 'failed'
|
||||
rc = None
|
||||
last_check = time.time()
|
||||
@@ -220,9 +226,13 @@ class IsolatedManager(object):
|
||||
logger.warning('Isolated job {} was manually canceled.'.format(self.instance.id))
|
||||
|
||||
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
|
||||
time_start = datetime.datetime.now()
|
||||
runner_obj = self.run_management_playbook('check_isolated.yml',
|
||||
self.private_data_dir,
|
||||
extravars=extravars)
|
||||
time_end = datetime.datetime.now()
|
||||
time_diff = time_end - time_start
|
||||
logger.debug('Finished checking on isolated job {} with `check_isolated.yml` took {} seconds.'.format(self.instance.id, time_diff.total_seconds()))
|
||||
status, rc = runner_obj.status, runner_obj.rc
|
||||
|
||||
if self.check_callback is not None and not self.captured_command_artifact:
|
||||
|
||||
@@ -21,7 +21,7 @@ from awx.main.signals import (
|
||||
disable_computed_fields
|
||||
)
|
||||
|
||||
from awx.main.management.commands.deletion import AWXCollector, pre_delete
|
||||
from awx.main.utils.deletion import AWXCollector, pre_delete
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
@@ -133,7 +133,7 @@ class AnsibleInventoryLoader(object):
|
||||
# NOTE: why do we add "python" to the start of these args?
|
||||
# the script that runs ansible-inventory specifies a python interpreter
|
||||
# that makes no sense in light of the fact that we put all the dependencies
|
||||
# inside of /venv/ansible, so we override the specified interpreter
|
||||
# inside of /var/lib/awx/venv/ansible, so we override the specified interpreter
|
||||
# https://github.com/ansible/ansible/issues/50714
|
||||
bargs = ['python', ansible_inventory_path, '-i', self.source]
|
||||
bargs.extend(['--playbook-dir', functioning_dir(self.source)])
|
||||
|
||||
@@ -81,10 +81,17 @@ User.add_to_class('accessible_objects', user_accessible_objects)
|
||||
|
||||
|
||||
def enforce_bigint_pk_migration():
|
||||
#
|
||||
# NOTE: this function is not actually in use anymore,
|
||||
# but has been intentionally kept for historical purposes,
|
||||
# and to serve as an illustration if we ever need to perform
|
||||
# bulk modification/migration of event data in the future.
|
||||
#
|
||||
# see: https://github.com/ansible/awx/issues/6010
|
||||
# look at all the event tables and verify that they have been fully migrated
|
||||
# from the *old* int primary key table to the replacement bigint table
|
||||
# if not, attempt to migrate them in the background
|
||||
#
|
||||
for tblname in (
|
||||
'main_jobevent', 'main_inventoryupdateevent',
|
||||
'main_projectupdateevent', 'main_adhoccommandevent',
|
||||
|
||||
@@ -819,6 +819,11 @@ ManagedCredentialType(
|
||||
'It is only needed for Keystone v3 authentication '
|
||||
'URLs. Refer to Ansible Tower documentation for '
|
||||
'common scenarios.')
|
||||
}, {
|
||||
'id': 'region',
|
||||
'label': ugettext_noop('Region Name'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'),
|
||||
}, {
|
||||
'id': 'verify_ssl',
|
||||
'label': ugettext_noop('Verify SSL'),
|
||||
|
||||
@@ -82,6 +82,7 @@ def _openstack_data(cred):
|
||||
if cred.has_input('domain'):
|
||||
openstack_auth['domain_name'] = cred.get_input('domain', default='')
|
||||
verify_state = cred.get_input('verify_ssl', default=True)
|
||||
|
||||
openstack_data = {
|
||||
'clouds': {
|
||||
'devstack': {
|
||||
@@ -90,6 +91,10 @@ def _openstack_data(cred):
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if cred.has_input('project_region_name'):
|
||||
openstack_data['clouds']['devstack']['region_name'] = cred.get_input('project_region_name', default='')
|
||||
|
||||
return openstack_data
|
||||
|
||||
|
||||
|
||||
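With the hunk above, the generated OpenStack config gains a `region_name` entry whenever the credential supplies `project_region_name`. The snippet below only illustrates the resulting structure, serialized with PyYAML the same way the tests read it back; all values are placeholders.

```python
# Illustrative only: the shape of the clouds.yml payload produced above once a
# region is supplied. Values are placeholders, not real credentials.
import yaml

openstack_data = {
    'clouds': {
        'devstack': {
            'auth': {
                'auth_url': 'https://keystone.example.org',
                'username': 'demo',
                'password': 'not-a-real-password',
                'project_name': 'demo-project',
            },
            'verify': True,
            # New in this change: emitted only when the credential has a
            # project_region_name input (e.g. required by providers like OVH).
            'region_name': 'GRA7',
        },
    },
}

print(yaml.safe_dump(openstack_data, default_flow_style=False))
```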
@@ -12,7 +12,7 @@ from django.core.mail.message import EmailMessage
|
||||
from django.db import connection
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import smart_str, force_text
|
||||
from jinja2 import sandbox
|
||||
from jinja2 import sandbox, ChainableUndefined
|
||||
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
|
||||
|
||||
# AWX
|
||||
@@ -357,7 +357,7 @@ class JobNotificationMixin(object):
|
||||
'url': 'https://towerhost/#/jobs/playbook/1010',
|
||||
'approval_status': 'approved',
|
||||
'approval_node_name': 'Approve Me',
|
||||
'workflow_url': 'https://towerhost/#/workflows/1010',
|
||||
'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
|
||||
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
|
||||
'traceback': '',
|
||||
'status': 'running',
|
||||
@@ -429,7 +429,7 @@ class JobNotificationMixin(object):
|
||||
raise RuntimeError("Define me")
|
||||
|
||||
def build_notification_message(self, nt, status):
|
||||
env = sandbox.ImmutableSandboxedEnvironment()
|
||||
env = sandbox.ImmutableSandboxedEnvironment(undefined=ChainableUndefined)
|
||||
|
||||
from awx.api.serializers import UnifiedJobSerializer
|
||||
job_serialization = UnifiedJobSerializer(self).to_representation(self)
|
||||
|
||||
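Passing `undefined=ChainableUndefined` to the sandboxed environment is what enables the "chain undefined attributes" behavior mentioned in the changelog: a notification template can reach through attributes that do not exist and simply render nothing. A short sketch, assuming Jinja2 2.11 or newer where `ChainableUndefined` is exported:

```python
# Sketch: with ChainableUndefined, a template can dig through attributes that
# do not exist without raising UndefinedError; the missing chain renders empty.
from jinja2 import sandbox, ChainableUndefined

default_env = sandbox.ImmutableSandboxedEnvironment()
lenient_env = sandbox.ImmutableSandboxedEnvironment(undefined=ChainableUndefined)

template = "job: {{ job.summary_fields.inventory.name }}"
context = {'job': {}}   # 'summary_fields' is missing entirely

print(lenient_env.from_string(template).render(**context))   # "job: "
try:
    default_env.from_string(template).render(**context)
except Exception as exc:   # jinja2.exceptions.UndefinedError
    print(f"default environment raises: {exc!r}")
```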
@@ -620,7 +620,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
|
||||
return reverse('api:workflow_job_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.pk))
|
||||
return urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.pk))
|
||||
|
||||
def notification_data(self):
|
||||
result = super(WorkflowJob, self).notification_data()
|
||||
@@ -752,7 +752,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
|
||||
return None
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
||||
return urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.workflow_job.id))
|
||||
|
||||
def _get_parent_field_name(self):
|
||||
return 'workflow_approval_template'
|
||||
@@ -840,7 +840,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
|
||||
return (msg, body)
|
||||
|
||||
def context(self, approval_status):
|
||||
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
||||
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.workflow_job.id))
|
||||
return {'approval_status': approval_status,
|
||||
'approval_node_name': self.workflow_approval_template.name,
|
||||
'workflow_url': workflow_url,
|
||||
|
||||
@@ -121,6 +121,27 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
|
||||
Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).members.remove(instance)
|
||||
|
||||
|
||||
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
||||
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
||||
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
||||
new_status_value = bool(kwargs['action'] == 'post_add')
|
||||
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
||||
role = instance
|
||||
if role.singleton_name == ROLE_SINGLETON_SYSTEM_ADMINISTRATOR:
|
||||
if kwargs['pk_set']:
|
||||
kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).update(is_superuser=new_status_value)
|
||||
elif kwargs['action'] == 'post_clear':
|
||||
kwargs['model'].objects.all().update(is_superuser=False)
|
||||
else:
|
||||
user = instance
|
||||
if kwargs['action'] == 'post_clear':
|
||||
user.is_superuser = False
|
||||
user.save(update_fields=['is_superuser'])
|
||||
elif kwargs['model'].objects.filter(pk__in=kwargs['pk_set'], singleton_name=ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).exists():
|
||||
user.is_superuser = new_status_value
|
||||
user.save(update_fields=['is_superuser'])
|
||||
|
||||
|
||||
def rbac_activity_stream(instance, sender, **kwargs):
|
||||
# Only if we are associating/disassociating
|
||||
if kwargs['action'] in ['pre_add', 'pre_remove']:
|
||||
@@ -197,6 +218,7 @@ m2m_changed.connect(rebuild_role_ancestor_list, Role.parents.through)
|
||||
m2m_changed.connect(rbac_activity_stream, Role.members.through)
|
||||
m2m_changed.connect(rbac_activity_stream, Role.parents.through)
|
||||
post_save.connect(sync_superuser_status_to_rbac, sender=User)
|
||||
m2m_changed.connect(sync_rbac_to_superuser_status, Role.members.through)
|
||||
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJob)
|
||||
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJobTemplate)
|
||||
|
||||
|
||||
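The new `sync_rbac_to_superuser_status` receiver fires for both directions of the membership relation, `role.members.add(user)` and `user.roles.add(role)`, and tells them apart by duck typing on the instance it receives. The sketch below reproduces just that dispatch logic with plain classes; it is not the Django signal machinery itself, and all names are illustrative.

```python
# Sketch of the dispatch logic in sync_rbac_to_superuser_status above: the same
# handler serves role.members.add(user) and user.roles.add(role), and it tells
# the two apart by checking for a role-only attribute on the instance.
SYSTEM_ADMIN = 'system_administrator'


class Role:
    def __init__(self, singleton_name):
        self.singleton_name = singleton_name


class User:
    def __init__(self, username):
        self.username = username
        self.is_superuser = False


def sync_superuser(instance, action, users_by_pk, pk_set):
    new_value = action == 'post_add'
    if hasattr(instance, 'singleton_name'):        # fired from role.members.add(...)
        if instance.singleton_name == SYSTEM_ADMIN:
            for pk in pk_set:
                users_by_pk[pk].is_superuser = new_value
    else:                                          # fired from user.roles.add(...)
        instance.is_superuser = new_value


users = {1: User('alice'), 2: User('bob')}
admin_role = Role(SYSTEM_ADMIN)

sync_superuser(admin_role, 'post_add', users, {1, 2})    # role -> users direction
sync_superuser(users[2], 'post_remove', users, set())    # user -> role direction
print(users[1].is_superuser, users[2].is_superuser)      # True False
```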
@@ -60,7 +60,7 @@ from awx.main.models import (
|
||||
Inventory, InventorySource, SmartInventoryMembership,
|
||||
Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob,
|
||||
JobEvent, ProjectUpdateEvent, InventoryUpdateEvent, AdHocCommandEvent, SystemJobEvent,
|
||||
build_safe_env, enforce_bigint_pk_migration
|
||||
build_safe_env
|
||||
)
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
from awx.main.exceptions import AwxTaskError, PostRunError
|
||||
@@ -138,12 +138,6 @@ def dispatch_startup():
|
||||
if Instance.objects.me().is_controller():
|
||||
awx_isolated_heartbeat()
|
||||
|
||||
# at process startup, detect the need to migrate old event records from int
|
||||
# to bigint; at *some point* in the future, once certain versions of AWX
|
||||
# and Tower fall out of use/support, we can probably just _assume_ that
|
||||
# everybody has moved to bigint, and remove this code entirely
|
||||
enforce_bigint_pk_migration()
|
||||
|
||||
# Update Tower's rsyslog.conf file based on loggins settings in the db
|
||||
reconfigure_rsyslog()
|
||||
|
||||
@@ -378,6 +372,7 @@ def gather_analytics():
|
||||
|
||||
from awx.conf.models import Setting
|
||||
from rest_framework.fields import DateTimeField
|
||||
from awx.main.signals import disable_activity_stream
|
||||
if not settings.INSIGHTS_TRACKING_STATE:
|
||||
return
|
||||
if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
|
||||
@@ -414,7 +409,8 @@ def gather_analytics():
|
||||
if not _gather_and_ship(incremental_collectors, since=start, until=until):
|
||||
break
|
||||
start = until
|
||||
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
|
||||
with disable_activity_stream():
|
||||
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
|
||||
if subset:
|
||||
_gather_and_ship(subset, since=since, until=gather_time)
|
||||
|
||||
@@ -736,6 +732,12 @@ def update_host_smart_inventory_memberships():
|
||||
|
||||
@task(queue=get_local_queuename)
|
||||
def migrate_legacy_event_data(tblname):
|
||||
#
|
||||
# NOTE: this function is not actually in use anymore,
|
||||
# but has been intentionally kept for historical purposes,
|
||||
# and to serve as an illustration if we ever need to perform
|
||||
# bulk modification/migration of event data in the future.
|
||||
#
|
||||
if 'event' not in tblname:
|
||||
return
|
||||
with advisory_lock(f'bigint_migration_{tblname}', wait=False) as acquired:
|
||||
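`migrate_legacy_event_data` wraps each table's migration in a non-blocking advisory lock so that only one worker touches a table at a time. The sketch below shows the same "try the lock, bail out if someone else holds it" pattern with raw psycopg2 and `pg_try_advisory_lock`; the DSN and lock-key derivation are assumptions, and AWX itself uses its own `advisory_lock` helper rather than hand-written SQL.

```python
# Sketch only: the "skip if someone else holds the lock" pattern that
# advisory_lock(..., wait=False) provides in the hunk above.
import zlib

import psycopg2


def try_migrate(tblname, dsn='dbname=awx user=awx'):
    # Derive a stable 32-bit key for this table's lock from its name.
    lock_key = zlib.crc32(f'bigint_migration_{tblname}'.encode())
    conn = psycopg2.connect(dsn)
    try:
        with conn.cursor() as cur:
            cur.execute('SELECT pg_try_advisory_lock(%s)', (lock_key,))
            acquired = cur.fetchone()[0]
            if not acquired:
                return False   # another worker owns this table; bail out quietly
            try:
                # ... perform the batched copy from the old int-PK table here ...
                return True
            finally:
                cur.execute('SELECT pg_advisory_unlock(%s)', (lock_key,))
    finally:
        conn.close()
```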
@@ -891,10 +893,19 @@ class BaseTask(object):
|
||||
'''
|
||||
Create a temporary directory for job-related files.
|
||||
'''
|
||||
path = tempfile.mkdtemp(prefix='awx_%s_' % instance.pk, dir=settings.AWX_PROOT_BASE_PATH)
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
bwrap_path = tempfile.mkdtemp(
|
||||
prefix=f'bwrap_{instance.pk}_',
|
||||
dir=settings.AWX_PROOT_BASE_PATH
|
||||
)
|
||||
os.chmod(bwrap_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
if settings.AWX_CLEANUP_PATHS:
|
||||
self.cleanup_paths.append(path)
|
||||
self.cleanup_paths.append(bwrap_path)
|
||||
|
||||
path = tempfile.mkdtemp(
|
||||
prefix='awx_%s_' % instance.pk,
|
||||
dir=bwrap_path,
|
||||
)
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
runner_project_folder = os.path.join(path, 'project')
|
||||
if not os.path.exists(runner_project_folder):
|
||||
# Ansible Runner requires that this directory exists.
|
||||
@@ -987,14 +998,7 @@ class BaseTask(object):
|
||||
show_paths = self.proot_show_paths + local_paths + \
|
||||
settings.AWX_PROOT_SHOW_PATHS
|
||||
|
||||
pi_path = settings.AWX_PROOT_BASE_PATH
|
||||
if not self.instance.is_isolated() and not self.instance.is_containerized:
|
||||
pi_path = tempfile.mkdtemp(
|
||||
prefix='ansible_runner_pi_',
|
||||
dir=settings.AWX_PROOT_BASE_PATH
|
||||
)
|
||||
os.chmod(pi_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
self.cleanup_paths.append(pi_path)
|
||||
pi_path = os.path.split(private_data_dir)[0]
|
||||
|
||||
process_isolation_params = {
|
||||
'process_isolation': True,
|
||||
@@ -1006,6 +1010,8 @@ class BaseTask(object):
|
||||
'/etc/ssh',
|
||||
'/var/lib/awx',
|
||||
'/var/log',
|
||||
'/home',
|
||||
'/var/tmp',
|
||||
settings.PROJECTS_ROOT,
|
||||
settings.JOBOUTPUT_ROOT,
|
||||
] + getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [],
|
||||
|
||||
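The private-data-dir change above nests the job's `awx_<pk>_*` directory inside a new `bwrap_<pk>_*` parent, creates the `project/` folder Ansible Runner expects, and later derives the process-isolation pivot path as the parent of the private data dir. A runnable sketch of that layout, with `/tmp` standing in for `settings.AWX_PROOT_BASE_PATH` and an arbitrary job id:

```python
# Sketch of the directory layout the hunk above builds; /tmp stands in for
# settings.AWX_PROOT_BASE_PATH and 42 for the job's primary key.
import os
import stat
import tempfile

base_path = '/tmp'
job_pk = 42

# Outer directory: becomes the bwrap pivot root (pi_path) for process isolation.
bwrap_path = tempfile.mkdtemp(prefix=f'bwrap_{job_pk}_', dir=base_path)
os.chmod(bwrap_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

# Inner directory: the job's private data dir, nested inside the bwrap dir.
private_data_dir = tempfile.mkdtemp(prefix=f'awx_{job_pk}_', dir=bwrap_path)
os.chmod(private_data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

# Ansible Runner expects a project/ folder inside the private data dir.
os.makedirs(os.path.join(private_data_dir, 'project'), exist_ok=True)

# The pivot root for isolation is now simply the parent of the private data dir.
pi_path = os.path.split(private_data_dir)[0]
assert pi_path == bwrap_path
print(bwrap_path, private_data_dir, sep='\n')
```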
@@ -2,7 +2,7 @@ import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
from awx.main.models import Project
|
||||
from awx.main.models import Project, Host
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -81,6 +81,8 @@ def test_org_counts_detail_admin(resourced_organization, user, get):
|
||||
assert response.status_code == 200
|
||||
|
||||
counts = response.data['summary_fields']['related_field_counts']
|
||||
assert counts['hosts'] == 0
|
||||
counts.pop('hosts')
|
||||
assert counts == COUNTS_PRIMES
|
||||
|
||||
|
||||
@@ -93,6 +95,8 @@ def test_org_counts_detail_member(resourced_organization, user, get):
|
||||
assert response.status_code == 200
|
||||
|
||||
counts = response.data['summary_fields']['related_field_counts']
|
||||
assert counts['hosts'] == 0
|
||||
counts.pop('hosts')
|
||||
assert counts == {
|
||||
'users': COUNTS_PRIMES['users'], # Policy is that members can see other users and admins
|
||||
'admins': COUNTS_PRIMES['admins'],
|
||||
@@ -111,6 +115,7 @@ def test_org_counts_list_admin(resourced_organization, user, get):
|
||||
assert response.status_code == 200
|
||||
|
||||
counts = response.data['results'][0]['summary_fields']['related_field_counts']
|
||||
assert 'hosts' not in counts # doesn't show in list view
|
||||
assert counts == COUNTS_PRIMES
|
||||
|
||||
|
||||
@@ -123,6 +128,7 @@ def test_org_counts_list_member(resourced_organization, user, get):
|
||||
assert response.status_code == 200
|
||||
|
||||
counts = response.data['results'][0]['summary_fields']['related_field_counts']
|
||||
assert 'hosts' not in counts # doesn't show in list view
|
||||
|
||||
assert counts == {
|
||||
'users': COUNTS_PRIMES['users'], # Policy is that members can see other users and admins
|
||||
@@ -145,6 +151,7 @@ def test_new_org_zero_counts(user, post):
|
||||
|
||||
new_org_list = post_response.render().data
|
||||
counts_dict = new_org_list['summary_fields']['related_field_counts']
|
||||
assert 'hosts' not in counts_dict # doesn't show in list view
|
||||
assert counts_dict == COUNTS_ZEROS
|
||||
|
||||
|
||||
@@ -167,6 +174,19 @@ def test_two_organizations(resourced_organization, organizations, user, get):
|
||||
assert counts[org_id_zero] == COUNTS_ZEROS
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_hosts_counted(resourced_organization, user, get):
|
||||
admin_user = user('admin', True)
|
||||
assert Host.objects.org_active_count(resourced_organization.id) == 0
|
||||
resourced_organization.inventories.first().hosts.create(name='Some Host')
|
||||
assert Host.objects.org_active_count(resourced_organization.id) == 1
|
||||
response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
|
||||
assert response.status_code == 200
|
||||
|
||||
counts = response.data['summary_fields']['related_field_counts']
|
||||
assert counts['hosts'] == Host.objects.org_active_count(resourced_organization.id) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_JT_counted(resourced_organization, user, get):
|
||||
admin_user = user('admin', True)
|
||||
@@ -180,7 +200,10 @@ def test_scan_JT_counted(resourced_organization, user, get):
|
||||
# Test detail view
|
||||
detail_response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
|
||||
assert detail_response.status_code == 200
|
||||
assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
|
||||
counts = detail_response.data['summary_fields']['related_field_counts']
|
||||
assert 'hosts' in counts
|
||||
counts.pop('hosts')
|
||||
assert counts == counts_dict
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -205,4 +228,7 @@ def test_JT_not_double_counted(resourced_organization, user, get):
|
||||
# Test detail view
|
||||
detail_response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
|
||||
assert detail_response.status_code == 200
|
||||
assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
|
||||
counts = detail_response.data['summary_fields']['related_field_counts']
|
||||
assert 'hosts' in counts
|
||||
counts.pop('hosts')
|
||||
assert counts == counts_dict
|
||||
|
||||
@@ -6,7 +6,7 @@ from collections import OrderedDict
|
||||
from django.db.models.deletion import Collector, SET_NULL, CASCADE
|
||||
from django.core.management import call_command
|
||||
|
||||
from awx.main.management.commands.deletion import AWXCollector
|
||||
from awx.main.utils.deletion import AWXCollector
|
||||
from awx.main.models import (
|
||||
JobTemplate, User, Job, JobEvent, Notification,
|
||||
WorkflowJobNode, JobHostSummary
|
||||
|
||||
@@ -16,7 +16,7 @@ def test_awx_virtualenv_from_settings(inventory, project, machine_credential):
|
||||
)
|
||||
jt.credentials.add(machine_credential)
|
||||
job = jt.create_unified_job()
|
||||
assert job.ansible_virtualenv_path == '/venv/ansible'
|
||||
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/ansible'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -43,28 +43,28 @@ def test_awx_custom_virtualenv(inventory, project, machine_credential, organizat
|
||||
jt.credentials.add(machine_credential)
|
||||
job = jt.create_unified_job()
|
||||
|
||||
job.organization.custom_virtualenv = '/venv/fancy-org'
|
||||
job.organization.custom_virtualenv = '/var/lib/awx/venv/fancy-org'
|
||||
job.organization.save()
|
||||
assert job.ansible_virtualenv_path == '/venv/fancy-org'
|
||||
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-org'
|
||||
|
||||
job.project.custom_virtualenv = '/venv/fancy-proj'
|
||||
job.project.custom_virtualenv = '/var/lib/awx/venv/fancy-proj'
|
||||
job.project.save()
|
||||
assert job.ansible_virtualenv_path == '/venv/fancy-proj'
|
||||
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-proj'
|
||||
|
||||
job.job_template.custom_virtualenv = '/venv/fancy-jt'
|
||||
job.job_template.custom_virtualenv = '/var/lib/awx/venv/fancy-jt'
|
||||
job.job_template.save()
|
||||
assert job.ansible_virtualenv_path == '/venv/fancy-jt'
|
||||
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-jt'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_awx_custom_virtualenv_without_jt(project):
|
||||
project.custom_virtualenv = '/venv/fancy-proj'
|
||||
project.custom_virtualenv = '/var/lib/awx/venv/fancy-proj'
|
||||
project.save()
|
||||
job = Job(project=project)
|
||||
job.save()
|
||||
|
||||
job = Job.objects.get(pk=job.id)
|
||||
assert job.ansible_virtualenv_path == '/venv/fancy-proj'
|
||||
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-proj'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -4,7 +4,7 @@ from unittest import mock
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from awx.main.access import UserAccess, RoleAccess, TeamAccess
|
||||
from awx.main.models import User, Organization, Inventory
|
||||
from awx.main.models import User, Organization, Inventory, Role
|
||||
|
||||
|
||||
class TestSysAuditorTransactional(TransactionTestCase):
|
||||
@@ -170,4 +170,34 @@ def test_org_admin_cannot_delete_member_attached_to_other_group(org_admin, org_m
|
||||
access = UserAccess(org_admin)
|
||||
other_org.member_role.members.add(org_member)
|
||||
assert not access.can_delete(org_member)
|
||||
|
||||
|
||||
|
||||
@pytest.mark.parametrize('reverse', (True, False))
|
||||
@pytest.mark.django_db
|
||||
def test_consistency_of_is_superuser_flag(reverse):
|
||||
users = [User.objects.create(username='rando_{}'.format(i)) for i in range(2)]
|
||||
for u in users:
|
||||
assert u.is_superuser is False
|
||||
|
||||
system_admin = Role.singleton('system_administrator')
|
||||
if reverse:
|
||||
for u in users:
|
||||
u.roles.add(system_admin)
|
||||
else:
|
||||
system_admin.members.add(*[u.id for u in users]) # like .add(42, 54)
|
||||
|
||||
for u in users:
|
||||
u.refresh_from_db()
|
||||
assert u.is_superuser is True
|
||||
|
||||
users[0].roles.clear()
|
||||
for u in users:
|
||||
u.refresh_from_db()
|
||||
assert users[0].is_superuser is False
|
||||
assert users[1].is_superuser is True
|
||||
|
||||
system_admin.members.clear()
|
||||
|
||||
for u in users:
|
||||
u.refresh_from_db()
|
||||
assert u.is_superuser is False
|
||||
|
||||
@@ -180,7 +180,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da
|
||||
'source_vars_dict': {},
|
||||
'get_cloud_credential': mocker.Mock(return_value=credential),
|
||||
'get_extra_credentials': lambda x: [],
|
||||
'ansible_virtualenv_path': '/venv/foo'
|
||||
'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
|
||||
})
|
||||
cloud_config = update.build_private_data(inventory_update, private_data_dir)
|
||||
cloud_credential = yaml.safe_load(
|
||||
@@ -224,6 +224,52 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
|
||||
'source_vars_dict': {},
|
||||
'get_cloud_credential': mocker.Mock(return_value=credential),
|
||||
'get_extra_credentials': lambda x: [],
|
||||
'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
|
||||
})
|
||||
cloud_config = update.build_private_data(inventory_update, private_data_dir)
|
||||
cloud_credential = yaml.safe_load(
|
||||
cloud_config.get('credentials')[credential]
|
||||
)
|
||||
assert cloud_credential['clouds'] == {
|
||||
'devstack': {
|
||||
'auth': {
|
||||
'auth_url': 'https://keystone.openstack.example.org',
|
||||
'password': 'secrete',
|
||||
'project_name': 'demo-project',
|
||||
'username': 'demo',
|
||||
'domain_name': 'my-demo-domain',
|
||||
'project_domain_name': 'project-domain',
|
||||
},
|
||||
'verify': expected,
|
||||
'private': True,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source,expected", [
|
||||
(None, True), (False, False), (True, True)
|
||||
])
|
||||
def test_openstack_client_config_generation_with_project_region_name(mocker, source, expected, private_data_dir):
|
||||
update = tasks.RunInventoryUpdate()
|
||||
credential_type = CredentialType.defaults['openstack']()
|
||||
inputs = {
|
||||
'host': 'https://keystone.openstack.example.org',
|
||||
'username': 'demo',
|
||||
'password': 'secrete',
|
||||
'project': 'demo-project',
|
||||
'domain': 'my-demo-domain',
|
||||
'project_domain_name': 'project-domain',
|
||||
'project_region_name': 'region-name',
|
||||
}
|
||||
if source is not None:
|
||||
inputs['verify_ssl'] = source
|
||||
credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
|
||||
|
||||
inventory_update = mocker.Mock(**{
|
||||
'source': 'openstack',
|
||||
'source_vars_dict': {},
|
||||
'get_cloud_credential': mocker.Mock(return_value=credential),
|
||||
'get_extra_credentials': lambda x: [],
|
||||
'ansible_virtualenv_path': '/venv/foo'
|
||||
})
|
||||
cloud_config = update.build_private_data(inventory_update, private_data_dir)
|
||||
@@ -242,6 +288,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
|
||||
},
|
||||
'verify': expected,
|
||||
'private': True,
|
||||
'region_name': 'region-name',
|
||||
}
|
||||
}
|
||||
|
||||
@@ -267,7 +314,7 @@ def test_openstack_client_config_generation_with_private_source_vars(mocker, sou
|
||||
'source_vars_dict': {'private': source},
|
||||
'get_cloud_credential': mocker.Mock(return_value=credential),
|
||||
'get_extra_credentials': lambda x: [],
|
||||
'ansible_virtualenv_path': '/venv/foo'
|
||||
'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
|
||||
})
|
||||
cloud_config = update.build_private_data(inventory_update, private_data_dir)
|
||||
cloud_credential = yaml.load(
|
||||
@@ -505,8 +552,8 @@ class TestGenericRun():
|
||||
task.should_use_proot = lambda instance: True
|
||||
task.instance = job
|
||||
|
||||
private_data_dir = '/foo'
|
||||
cwd = '/bar'
|
||||
private_data_dir = os.path.join(settings.AWX_PROOT_BASE_PATH, 'foo')
|
||||
cwd = '/the/bar'
|
||||
|
||||
settings.AWX_PROOT_HIDE_PATHS = ['/AWX_PROOT_HIDE_PATHS1', '/AWX_PROOT_HIDE_PATHS2']
|
||||
settings.ANSIBLE_VENV_PATH = '/ANSIBLE_VENV_PATH'
|
||||
@@ -531,7 +578,7 @@ class TestGenericRun():
|
||||
'/AWX_PROOT_HIDE_PATHS1',
|
||||
'/AWX_PROOT_HIDE_PATHS2']:
|
||||
assert p in process_isolation_params['process_isolation_hide_paths']
|
||||
assert 9 == len(process_isolation_params['process_isolation_hide_paths'])
|
||||
assert 11 == len(process_isolation_params['process_isolation_hide_paths'])
|
||||
assert '/ANSIBLE_VENV_PATH' in process_isolation_params['process_isolation_ro_paths']
|
||||
assert '/AWX_VENV_PATH' in process_isolation_params['process_isolation_ro_paths']
|
||||
assert 2 == len(process_isolation_params['process_isolation_ro_paths'])
|
||||
@@ -625,13 +672,13 @@ class TestGenericRun():
|
||||
|
||||
def test_invalid_custom_virtualenv(self, patch_Job, private_data_dir):
|
||||
job = Job(project=Project(), inventory=Inventory())
|
||||
job.project.custom_virtualenv = '/venv/missing'
|
||||
job.project.custom_virtualenv = '/var/lib/awx/venv/missing'
|
||||
task = tasks.RunJob()
|
||||
|
||||
with pytest.raises(tasks.InvalidVirtualenvError) as e:
|
||||
task.build_env(job, private_data_dir)
|
||||
|
||||
assert 'Invalid virtual environment selected: /venv/missing' == str(e.value)
|
||||
assert 'Invalid virtual environment selected: /var/lib/awx/venv/missing' == str(e.value)
|
||||
|
||||
|
||||
class TestAdhocRun(TestJobExecution):
|
||||
|
||||
@@ -863,7 +863,7 @@ def wrap_args_with_proot(args, cwd, **kwargs):
|
||||
new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc']
|
||||
hide_paths = [settings.AWX_PROOT_BASE_PATH]
|
||||
if not kwargs.get('isolated'):
|
||||
hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh',
|
||||
hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh', '/var/tmp', '/home',
|
||||
settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
|
||||
hide_paths.extend(getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [])
|
||||
for path in sorted(set(hide_paths)):
|
||||
|
||||
@@ -9,6 +9,9 @@
|
||||
- ansible.posix
|
||||
|
||||
tasks:
|
||||
- name: "Output job the playbook is running for"
|
||||
debug:
|
||||
msg: "Checking on job {{ job_id }}"
|
||||
|
||||
- name: Determine if daemon process is alive.
|
||||
shell: "ansible-runner is-alive {{src}}"
|
||||
|
||||
@@ -13,6 +13,10 @@
|
||||
- ansible.posix
|
||||
|
||||
tasks:
|
||||
- name: "Output job the playbook is running for"
|
||||
debug:
|
||||
msg: "Checking on job {{ job_id }}"
|
||||
|
||||
- name: synchronize job environment with isolated host
|
||||
synchronize:
|
||||
copy_links: true
|
||||
|
||||
@@ -19,7 +19,6 @@ from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import datetime
|
||||
import subprocess
|
||||
@@ -38,32 +37,35 @@ def main():
|
||||
# this datetime, then it will be deleted because its job has finished
|
||||
job_cutoff = datetime.datetime.now() - datetime.timedelta(hours=1)
|
||||
|
||||
for search_pattern in [
|
||||
'/tmp/awx_[0-9]*_*', '/tmp/ansible_runner_pi_*',
|
||||
]:
|
||||
for path in glob.iglob(search_pattern):
|
||||
st = os.stat(path)
|
||||
modtime = datetime.datetime.fromtimestamp(st.st_mtime)
|
||||
BASE_DIR = '/tmp'
|
||||
|
||||
if modtime > job_cutoff:
|
||||
continue
|
||||
elif modtime > folder_cutoff:
|
||||
bwrap_pattern = 'bwrap_[0-9]*_*'
|
||||
private_data_dir_pattern = 'awx_[0-9]*_*'
|
||||
|
||||
bwrap_path_pattern = os.path.join(BASE_DIR, bwrap_pattern)
|
||||
|
||||
for bwrap_path in glob.iglob(bwrap_path_pattern):
|
||||
st = os.stat(bwrap_path)
|
||||
modtime = datetime.datetime.fromtimestamp(st.st_mtime)
|
||||
|
||||
if modtime > job_cutoff:
|
||||
continue
|
||||
elif modtime > folder_cutoff:
|
||||
private_data_dir_path_pattern = os.path.join(BASE_DIR, bwrap_path, private_data_dir_pattern)
|
||||
private_data_dir_path = next(glob.iglob(private_data_dir_path_pattern), None)
|
||||
if private_data_dir_path:
|
||||
try:
|
||||
re_match = re.match(r'\/tmp\/awx_\d+_.+', path)
|
||||
if re_match is not None:
|
||||
try:
|
||||
if subprocess.check_call(['ansible-runner', 'is-alive', path]) == 0:
|
||||
continue
|
||||
except subprocess.CalledProcessError:
|
||||
# the job isn't running anymore, clean up this path
|
||||
module.debug('Deleting path {} its job has completed.'.format(path))
|
||||
except (ValueError, IndexError):
|
||||
continue
|
||||
else:
|
||||
module.debug('Deleting path {} because modification date is too old.'.format(path))
|
||||
changed = True
|
||||
paths_removed.add(path)
|
||||
shutil.rmtree(path)
|
||||
if subprocess.check_call(['ansible-runner', 'is-alive', private_data_dir_path]) == 0:
|
||||
continue
|
||||
except subprocess.CalledProcessError:
|
||||
# the job isn't running anymore, clean up this path
|
||||
module.debug('Deleting path {} its job has completed.'.format(bwrap_path))
|
||||
module.debug('Deleting path {} due to private_data_dir not being found.'.format(bwrap_path))
|
||||
else:
|
||||
module.debug('Deleting path {} because modification date is too old.'.format(bwrap_path))
|
||||
changed = True
|
||||
paths_removed.add(bwrap_path)
|
||||
shutil.rmtree(bwrap_path)
|
||||
|
||||
module.exit_json(changed=changed, paths_removed=list(paths_removed))
|
||||
|
||||
|
||||
@@ -116,7 +116,7 @@ LOGIN_URL = '/api/login/'
|
||||
|
||||
# Absolute filesystem path to the directory to host projects (with playbooks).
|
||||
# This directory should not be web-accessible.
|
||||
PROJECTS_ROOT = os.path.join(BASE_DIR, 'projects')
|
||||
PROJECTS_ROOT = '/var/lib/awx/projects/'
|
||||
|
||||
# Absolute filesystem path to the directory to host collections for
|
||||
# running inventory imports, isolated playbooks
|
||||
@@ -125,10 +125,10 @@ AWX_ANSIBLE_COLLECTIONS_PATHS = os.path.join(BASE_DIR, 'vendor', 'awx_ansible_co
|
||||
# Absolute filesystem path to the directory for job status stdout (default for
|
||||
# development and tests, default for production defined in production.py). This
|
||||
# directory should not be web-accessible
|
||||
JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_output')
|
||||
JOBOUTPUT_ROOT = '/var/lib/awx/job_status/'
|
||||
|
||||
# Absolute filesystem path to the directory to store logs
|
||||
LOG_ROOT = os.path.join(BASE_DIR)
|
||||
LOG_ROOT = '/var/log/tower/'
|
||||
|
||||
# The heartbeat file for the tower scheduler
|
||||
SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.tower_cycle')
|
||||
@@ -196,9 +196,9 @@ LOCAL_STDOUT_EXPIRE_TIME = 2592000
|
||||
# events into the database
|
||||
JOB_EVENT_WORKERS = 4
|
||||
|
||||
# The number of seconds (must be an integer) to buffer callback receiver bulk
|
||||
# The number of seconds to buffer callback receiver bulk
|
||||
# writes in memory before flushing via JobEvent.objects.bulk_create()
|
||||
JOB_EVENT_BUFFER_SECONDS = 1
|
||||
JOB_EVENT_BUFFER_SECONDS = .1
|
||||
|
||||
# The interval at which callback receiver statistics should be
|
||||
# recorded
|
||||
@@ -662,7 +662,7 @@ INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM")
|
||||
# ----------------
|
||||
EC2_ENABLED_VAR = 'ec2_state'
|
||||
EC2_ENABLED_VALUE = 'running'
|
||||
EC2_INSTANCE_ID_VAR = 'ec2_id'
|
||||
EC2_INSTANCE_ID_VAR = 'instance_id'
|
||||
EC2_EXCLUDE_EMPTY_GROUPS = True
|
||||
|
||||
# ------------
|
||||
@@ -932,6 +932,14 @@ LOGGING = {
|
||||
'backupCount': 5,
|
||||
'formatter':'simple',
|
||||
},
|
||||
'isolated_manager': {
|
||||
'level': 'WARNING',
|
||||
'class':'logging.handlers.RotatingFileHandler',
|
||||
'filename': os.path.join(LOG_ROOT, 'isolated_manager.log'),
|
||||
'maxBytes': 1024 * 1024 * 5, # 5 MB
|
||||
'backupCount': 5,
|
||||
'formatter':'simple',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'django': {
|
||||
@@ -981,6 +989,11 @@ LOGGING = {
|
||||
'awx.main.wsbroadcast': {
|
||||
'handlers': ['wsbroadcast'],
|
||||
},
|
||||
'awx.isolated.manager': {
|
||||
'level': 'WARNING',
|
||||
'handlers': ['console', 'file', 'isolated_manager'],
|
||||
'propagate': True
|
||||
},
|
||||
'awx.isolated.manager.playbooks': {
|
||||
'handlers': ['management_playbooks'],
|
||||
'propagate': False
|
||||
|
||||
@@ -148,9 +148,9 @@ include(optional('/etc/tower/settings.py'), scope=locals())
|
||||
include(optional('/etc/tower/conf.d/*.py'), scope=locals())
|
||||
|
||||
# Installed differently in Dockerfile compared to production versions
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = '/vendor/awx_ansible_collections'
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = '/var/lib/awx/vendor/awx_ansible_collections'
|
||||
|
||||
BASE_VENV_PATH = "/venv/"
|
||||
BASE_VENV_PATH = "/var/lib/awx/venv/"
|
||||
ANSIBLE_VENV_PATH = os.path.join(BASE_VENV_PATH, "ansible")
|
||||
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")
|
||||
|
||||
|
||||
@@ -48,56 +48,12 @@ if "pytest" in sys.modules:
|
||||
}
|
||||
}
|
||||
|
||||
# Absolute filesystem path to the directory to host projects (with playbooks).
|
||||
# This directory should NOT be web-accessible.
|
||||
PROJECTS_ROOT = '/var/lib/awx/projects/'
|
||||
|
||||
# Location for cross-development of inventory plugins
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = '/vendor/awx_ansible_collections'
|
||||
|
||||
# Absolute filesystem path to the directory for job status stdout
|
||||
# This directory should not be web-accessible
|
||||
JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_status')
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = '/var/lib/awx/vendor/awx_ansible_collections'
|
||||
|
||||
# The UUID of the system, for HA.
|
||||
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
# Local time zone for this installation. Choices can be found here:
|
||||
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
|
||||
# although not all choices may be available on all operating systems.
|
||||
# On Unix systems, a value of None will cause Django to use the same
|
||||
# timezone as the operating system.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
USE_TZ = True
|
||||
TIME_ZONE = 'UTC'
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
# Hardcoded values can leak through source control. Consider loading
|
||||
# the secret key from an environment variable or a file instead.
|
||||
SECRET_KEY = 'p7z7g1ql4%6+(6nlebb6hdk7sd^&fnjpal308%n%+p^_e6vo1y'
|
||||
|
||||
# HTTP headers and meta keys to search to determine remote host name or IP. Add
|
||||
# additional items to this list, such as "HTTP_X_FORWARDED_FOR", if behind a
|
||||
# reverse proxy.
|
||||
REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
|
||||
|
||||
# If Tower is behind a reverse proxy/load balancer, use this setting to
|
||||
# whitelist the proxy IP addresses from which Tower should trust custom
|
||||
# REMOTE_HOST_HEADERS header values
|
||||
# REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', ''REMOTE_ADDR', 'REMOTE_HOST']
|
||||
# PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']
|
||||
# If this setting is an empty list (the default), the headers specified by
|
||||
# REMOTE_HOST_HEADERS will be trusted unconditionally')
|
||||
PROXY_IP_WHITELIST = []
|
||||
|
||||
# Define additional environment variables to be passed to ansible subprocesses
|
||||
#AWX_TASK_ENV['FOO'] = 'BAR'
|
||||
|
||||
# If set, use -vvv for project updates instead of -v for more output.
|
||||
# PROJECT_UPDATE_VVV=True
|
||||
|
||||
@@ -108,40 +64,6 @@ PROXY_IP_WHITELIST = []
|
||||
# Enable logging to syslog. Setting level to ERROR captures 500 errors,
|
||||
# WARNING also logs 4xx responses.
|
||||
|
||||
LOGGING['handlers']['syslog'] = {
|
||||
'level': 'WARNING',
|
||||
'filters': ['require_debug_false'],
|
||||
'class': 'logging.NullHandler',
|
||||
'formatter': 'simple',
|
||||
}
|
||||
|
||||
LOGGING['loggers']['django.request']['handlers'] = ['console']
|
||||
LOGGING['loggers']['rest_framework.request']['handlers'] = ['console']
|
||||
LOGGING['loggers']['awx']['handlers'] = ['console', 'external_logger']
|
||||
LOGGING['loggers']['awx.main.commands.run_callback_receiver']['handlers'] = [] # propogates to awx
|
||||
LOGGING['loggers']['awx.main.tasks']['handlers'] = ['console', 'external_logger']
|
||||
LOGGING['loggers']['awx.main.scheduler']['handlers'] = ['console', 'external_logger']
|
||||
LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console']
|
||||
LOGGING['loggers']['social']['handlers'] = ['console']
|
||||
LOGGING['loggers']['system_tracking_migrations']['handlers'] = ['console']
|
||||
LOGGING['loggers']['rbac_migrations']['handlers'] = ['console']
|
||||
LOGGING['loggers']['awx.isolated.manager.playbooks']['handlers'] = ['console']
|
||||
LOGGING['handlers']['callback_receiver'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['fact_receiver'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['task_system'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['tower_warnings'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['rbac_migrations'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['system_tracking_migrations'] = {'class': 'logging.NullHandler'}
|
||||
LOGGING['handlers']['management_playbooks'] = {'class': 'logging.NullHandler'}
|
||||
|
||||
|
||||
# Enable the following lines to also log to a file.
|
||||
#LOGGING['handlers']['file'] = {
|
||||
# 'class': 'logging.FileHandler',
|
||||
# 'filename': os.path.join(BASE_DIR, 'awx.log'),
|
||||
# 'formatter': 'simple',
|
||||
#}
|
||||
|
||||
# Enable the following lines to turn on lots of permissions-related logging.
|
||||
#LOGGING['loggers']['awx.main.access']['level'] = 'DEBUG'
|
||||
#LOGGING['loggers']['awx.main.signals']['level'] = 'DEBUG'
|
||||
@@ -154,74 +76,6 @@ LOGGING['handlers']['management_playbooks'] = {'class': 'logging.NullHandler'}
|
||||
#LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console']
|
||||
#LOGGING['loggers']['django_auth_ldap']['level'] = 'DEBUG'
|
||||
|
||||
###############################################################################
|
||||
# SCM TEST SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Define these variables to enable more complete testing of project support for
|
||||
# SCM updates. The test repositories listed do not have to contain any valid
|
||||
# playbooks.
|
||||
|
||||
try:
|
||||
path = os.path.expanduser(os.path.expandvars('~/.ssh/id_rsa'))
|
||||
TEST_SSH_KEY_DATA = open(path, 'rb').read()
|
||||
except IOError:
|
||||
TEST_SSH_KEY_DATA = ''
|
||||
|
||||
TEST_GIT_USERNAME = ''
|
||||
TEST_GIT_PASSWORD = ''
|
||||
TEST_GIT_KEY_DATA = TEST_SSH_KEY_DATA
|
||||
TEST_GIT_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com.git'
|
||||
TEST_GIT_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs.git'
|
||||
TEST_GIT_PRIVATE_SSH = 'git@github.com:ansible/product-docs.git'
|
||||
|
||||
TEST_SVN_USERNAME = ''
|
||||
TEST_SVN_PASSWORD = ''
|
||||
TEST_SVN_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com'
|
||||
TEST_SVN_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs'
|
||||
|
||||
# To test repo access via SSH login to localhost.
|
||||
import getpass
|
||||
try:
|
||||
TEST_SSH_LOOPBACK_USERNAME = getpass.getuser()
|
||||
except KeyError:
|
||||
TEST_SSH_LOOPBACK_USERNAME = 'root'
|
||||
TEST_SSH_LOOPBACK_PASSWORD = ''
|
||||
|
||||
###############################################################################
|
||||
# INVENTORY IMPORT TEST SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Define these variables to enable more complete testing of inventory import
|
||||
# from cloud providers.
|
||||
|
||||
# EC2 credentials
|
||||
TEST_AWS_ACCESS_KEY_ID = ''
|
||||
TEST_AWS_SECRET_ACCESS_KEY = ''
|
||||
TEST_AWS_REGIONS = 'all'
|
||||
# Check IAM STS credentials
|
||||
TEST_AWS_SECURITY_TOKEN = ''
|
||||
|
||||
# Rackspace credentials
|
||||
TEST_RACKSPACE_USERNAME = ''
|
||||
TEST_RACKSPACE_API_KEY = ''
|
||||
TEST_RACKSPACE_REGIONS = 'all'
|
||||
|
||||
# VMware credentials
|
||||
TEST_VMWARE_HOST = ''
|
||||
TEST_VMWARE_USER = ''
|
||||
TEST_VMWARE_PASSWORD = ''
|
||||
|
||||
# OpenStack credentials
|
||||
TEST_OPENSTACK_HOST = ''
|
||||
TEST_OPENSTACK_USER = ''
|
||||
TEST_OPENSTACK_PASSWORD = ''
|
||||
TEST_OPENSTACK_PROJECT = ''
|
||||
|
||||
# Azure credentials.
|
||||
TEST_AZURE_USERNAME = ''
|
||||
TEST_AZURE_KEY_DATA = ''
|
||||
|
||||
BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
|
||||
BROADCAST_WEBSOCKET_PORT = 8013
|
||||
BROADCAST_WEBSOCKET_VERIFY_CERT = False
|
||||
|
||||
@@ -1,192 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc. (formerly AnsibleWorks, Inc.)
|
||||
# All Rights Reserved.
|
||||
|
||||
# Local Django settings for AWX project. Rename to "local_settings.py" and
|
||||
# edit as needed for your development environment.
|
||||
|
||||
# All variables defined in awx/settings/development.py will already be loaded
|
||||
# into the global namespace before this file is loaded, to allow for reading
|
||||
# and updating the default settings as needed.
|
||||
|
||||
###############################################################################
|
||||
# MISC PROJECT SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Database settings to use PostgreSQL for development.
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql_psycopg2',
|
||||
'NAME': 'awx-dev',
|
||||
'USER': 'awx-dev',
|
||||
'PASSWORD': 'AWXsome1',
|
||||
'HOST': 'localhost',
|
||||
'PORT': '',
|
||||
}
|
||||
}
|
||||
|
||||
# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
|
||||
# commented out, Django will create the test_awx-dev database in PostgreSQL to
|
||||
# run unit tests.
|
||||
if is_testing(sys.argv):
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),
|
||||
'TEST': {
|
||||
# Test database cannot be :memory: for tests.
|
||||
'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
# AMQP configuration.
|
||||
BROKER_URL = 'amqp://guest:guest@localhost:5672'
|
||||
|
||||
# Absolute filesystem path to the directory to host projects (with playbooks).
|
||||
# This directory should NOT be web-accessible.
|
||||
PROJECTS_ROOT = os.path.join(BASE_DIR, 'projects')
|
||||
|
||||
# Absolute filesystem path to the directory for job status stdout
|
||||
# This directory should not be web-accessible
|
||||
JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_status')
|
||||
|
||||
# The UUID of the system, for HA.
|
||||
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
# Local time zone for this installation. Choices can be found here:
|
||||
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
|
||||
# although not all choices may be available on all operating systems.
|
||||
# On Unix systems, a value of None will cause Django to use the same
|
||||
# timezone as the operating system.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
TIME_ZONE = None
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
# Hardcoded values can leak through source control. Consider loading
|
||||
# the secret key from an environment variable or a file instead.
|
||||
SECRET_KEY = 'p7z7g1ql4%6+(6nlebb6hdk7sd^&fnjpal308%n%+p^_e6vo1y'
|
||||
|
||||
# HTTP headers and meta keys to search to determine remote host name or IP. Add
|
||||
# additional items to this list, such as "HTTP_X_FORWARDED_FOR", if behind a
|
||||
# reverse proxy.
|
||||
REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
|
||||
|
||||
# If Tower is behind a reverse proxy/load balancer, use this setting to
|
||||
# whitelist the proxy IP addresses from which Tower should trust custom
|
||||
# REMOTE_HOST_HEADERS header values
|
||||
# REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', ''REMOTE_ADDR', 'REMOTE_HOST']
|
||||
# PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']
|
||||
# If this setting is an empty list (the default), the headers specified by
|
||||
# REMOTE_HOST_HEADERS will be trusted unconditionally')
|
||||
PROXY_IP_WHITELIST = []
|
||||
|
||||
# Define additional environment variables to be passed to ansible subprocesses
|
||||
#AWX_TASK_ENV['FOO'] = 'BAR'
|
||||
|
||||
# If set, use -vvv for project updates instead of -v for more output.
|
||||
# PROJECT_UPDATE_VVV=True
|
||||
|
||||
###############################################################################
|
||||
# LOGGING SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Enable logging to syslog. Setting level to ERROR captures 500 errors,
|
||||
# WARNING also logs 4xx responses.
|
||||
LOGGING['handlers']['syslog'] = {
|
||||
'level': 'WARNING',
|
||||
'filters': [],
|
||||
'class': 'logging.handlers.SysLogHandler',
|
||||
'address': '/dev/log',
|
||||
'facility': 'local0',
|
||||
'formatter': 'simple',
|
||||
}
|
||||
|
||||
# Enable the following lines to also log to a file.
|
||||
#LOGGING['handlers']['file'] = {
|
||||
# 'class': 'logging.FileHandler',
|
||||
# 'filename': os.path.join(BASE_DIR, 'awx.log'),
|
||||
# 'formatter': 'simple',
|
||||
#}
|
||||
|
||||
# Enable the following lines to turn on lots of permissions-related logging.
|
||||
#LOGGING['loggers']['awx.main.access']['level'] = 'DEBUG'
|
||||
#LOGGING['loggers']['awx.main.signals']['level'] = 'DEBUG'
|
||||
#LOGGING['loggers']['awx.main.permissions']['level'] = 'DEBUG'
|
||||
|
||||
# Enable the following line to turn on database settings logging.
|
||||
#LOGGING['loggers']['awx.conf']['level'] = 'DEBUG'
|
||||
|
||||
# Enable the following lines to turn on LDAP auth logging.
|
||||
#LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console']
|
||||
#LOGGING['loggers']['django_auth_ldap']['level'] = 'DEBUG'
|
||||
|
||||
###############################################################################
|
||||
# SCM TEST SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Define these variables to enable more complete testing of project support for
|
||||
# SCM updates. The test repositories listed do not have to contain any valid
|
||||
# playbooks.
|
||||
|
||||
try:
|
||||
path = os.path.expanduser(os.path.expandvars('~/.ssh/id_rsa'))
|
||||
TEST_SSH_KEY_DATA = file(path, 'rb').read()
|
||||
except IOError:
|
||||
TEST_SSH_KEY_DATA = ''
|
||||
|
||||
TEST_GIT_USERNAME = ''
|
||||
TEST_GIT_PASSWORD = ''
|
||||
TEST_GIT_KEY_DATA = TEST_SSH_KEY_DATA
|
||||
TEST_GIT_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com.git'
|
||||
TEST_GIT_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs.git'
|
||||
TEST_GIT_PRIVATE_SSH = 'git@github.com:ansible/product-docs.git'
|
||||
|
||||
TEST_SVN_USERNAME = ''
|
||||
TEST_SVN_PASSWORD = ''
|
||||
TEST_SVN_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com'
|
||||
TEST_SVN_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs'
|
||||
|
||||
# To test repo access via SSH login to localhost.
|
||||
import getpass
|
||||
TEST_SSH_LOOPBACK_USERNAME = getpass.getuser()
|
||||
TEST_SSH_LOOPBACK_PASSWORD = ''
|
||||
|
||||
###############################################################################
|
||||
# INVENTORY IMPORT TEST SETTINGS
|
||||
###############################################################################
|
||||
|
||||
# Define these variables to enable more complete testing of inventory import
|
||||
# from cloud providers.
|
||||
|
||||
# EC2 credentials
|
||||
TEST_AWS_ACCESS_KEY_ID = ''
|
||||
TEST_AWS_SECRET_ACCESS_KEY = ''
|
||||
TEST_AWS_REGIONS = 'all'
|
||||
# Check IAM STS credentials
|
||||
TEST_AWS_SECURITY_TOKEN = ''
|
||||
|
||||
|
||||
# Rackspace credentials
|
||||
TEST_RACKSPACE_USERNAME = ''
|
||||
TEST_RACKSPACE_API_KEY = ''
|
||||
TEST_RACKSPACE_REGIONS = 'all'
|
||||
|
||||
# VMware credentials
|
||||
TEST_VMWARE_HOST = ''
|
||||
TEST_VMWARE_USER = ''
|
||||
TEST_VMWARE_PASSWORD = ''
|
||||
|
||||
# OpenStack credentials
|
||||
TEST_OPENSTACK_HOST = ''
|
||||
TEST_OPENSTACK_USER = ''
|
||||
TEST_OPENSTACK_PASSWORD = ''
|
||||
TEST_OPENSTACK_PROJECT = ''
|
||||
|
||||
# Azure credentials.
|
||||
TEST_AZURE_USERNAME = ''
|
||||
TEST_AZURE_KEY_DATA = ''
|
||||
@@ -30,10 +30,6 @@ SECRET_KEY = None
|
||||
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
# Absolute filesystem path to the directory for job status stdout
|
||||
# This directory should not be web-accessible
|
||||
JOBOUTPUT_ROOT = '/var/lib/awx/job_status/'
|
||||
|
||||
# The heartbeat file for the tower scheduler
|
||||
SCHEDULE_METADATA_LOCATION = '/var/lib/awx/.tower_cycle'
|
||||
|
||||
@@ -46,15 +42,6 @@ AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")
|
||||
|
||||
AWX_ISOLATED_USERNAME = 'awx'
|
||||
|
||||
LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log' # noqa
|
||||
LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log' # noqa
|
||||
LOGGING['handlers']['dispatcher']['filename'] = '/var/log/tower/dispatcher.log' # noqa
|
||||
LOGGING['handlers']['wsbroadcast']['filename'] = '/var/log/tower/wsbroadcast.log' # noqa
|
||||
LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log' # noqa
|
||||
LOGGING['handlers']['management_playbooks']['filename'] = '/var/log/tower/management_playbooks.log' # noqa
|
||||
LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log' # noqa
|
||||
LOGGING['handlers']['rbac_migrations']['filename'] = '/var/log/tower/tower_rbac_migrations.log' # noqa
|
||||
|
||||
# Store a snapshot of default settings at this point before loading any
|
||||
# customizable config files.
|
||||
DEFAULTS_SNAPSHOT = {}
|
||||
|
||||
@@ -445,7 +445,8 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin):
|
||||
|
||||
default_error_messages = {
|
||||
'type_error': _('Expected an instance of LDAPGroupType but got {input_type} instead.'),
|
||||
'missing_parameters': _('Missing required parameters in {dependency}.')
|
||||
'missing_parameters': _('Missing required parameters in {dependency}.'),
|
||||
'invalid_parameters': _('Invalid group_type parameters. Expected instance of dict but got {parameters_type} instead.')
|
||||
}
|
||||
|
||||
def __init__(self, choices=None, **kwargs):
|
||||
@@ -465,7 +466,6 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin):
|
||||
if not data:
|
||||
return None
|
||||
|
||||
params = self.get_depends_on() or {}
|
||||
cls = find_class_in_modules(data)
|
||||
if not cls:
|
||||
return None
|
||||
@@ -475,8 +475,16 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin):
|
||||
# Backwords compatability. Before AUTH_LDAP_GROUP_TYPE_PARAMS existed
|
||||
# MemberDNGroupType was the only group type, of the underlying lib, that
|
||||
# took a parameter.
|
||||
params = self.get_depends_on() or {}
|
||||
params_sanitized = dict()
|
||||
for attr in inspect.getargspec(cls.__init__).args[1:]:
|
||||
|
||||
cls_args = inspect.getargspec(cls.__init__).args[1:]
|
||||
|
||||
if cls_args:
|
||||
if not isinstance(params, dict):
|
||||
self.fail('invalid_parameters', parameters_type=type(params))
|
||||
|
||||
for attr in cls_args:
|
||||
if attr in params:
|
||||
params_sanitized[attr] = params[attr]
|
||||
|
||||
|
||||
@@ -6,4 +6,5 @@ coverage
|
||||
build
|
||||
node_modules
|
||||
dist
|
||||
images
|
||||
images
|
||||
instrumented
|
||||
@@ -8,8 +8,8 @@
|
||||
"modules": true
|
||||
}
|
||||
},
|
||||
"plugins": ["react-hooks"],
|
||||
"extends": ["airbnb", "prettier", "prettier/react"],
|
||||
"plugins": ["react-hooks", "jsx-a11y"],
|
||||
"extends": ["airbnb", "prettier", "prettier/react", "plugin:jsx-a11y/strict"],
|
||||
"settings": {
|
||||
"react": {
|
||||
"version": "16.5.2"
|
||||
|
||||
@@ -57,7 +57,7 @@ The UI is built using [ReactJS](https://reactjs.org/docs/getting-started.html) a

The AWX UI requires the following:

- Node 10.x LTS
- Node 14.x LTS
- NPM 6.x LTS

Run the following to install all the dependencies:
@@ -1,7 +1,7 @@
# AWX-PF

## Requirements
- node 10.x LTS, npm 6.x LTS, make, git
- node 14.x LTS, npm 6.x LTS, make, git

## Development
The API development server will need to be running. See [CONTRIBUTING.md](../../CONTRIBUTING.md).
@@ -15,6 +15,19 @@ npm --prefix=awx/ui_next install
npm --prefix=awx/ui_next start
```

### Build for the Development Containers
If you just want to build a ui for the container-based awx development
environment, use these make targets:

```shell
# The ui will be reachable at https://localhost:8043 or
# http://localhost:8013
make ui-devel

# clean up
make clean-ui
```

### Using an External Server
If you normally run awx on an external host/server (in this example, `awx.local`),
you'll need to use the `TARGET` environment variable when starting the ui development
27  awx/ui_next/docs/APP_ARCHITECTURE.md  (new file)
@@ -0,0 +1,27 @@
# Application Architecture

## Local Storage Integration
The `useStorage` hook integrates with the browser's localStorage api.
It accepts a localStorage key as its only argument and returns a state
variable and setter function for that state variable. The hook enables
bidirectional data transfer between tabs via an event listener that
is registered with the Web Storage api.

![useStorage hook diagram](images/useStorage.png)

The `useStorage` hook currently lives in the `AppContainer` component. It
can be relocated to a more general location if the need ever arises.

## Session Expiration
Session timeout state is communicated to the client in the HTTP(S)
response headers. Every HTTP(S) response is intercepted to read the
session expiration time before being passed into the rest of the
application. A timeout date is computed from the intercepted HTTP(S)
headers and is pushed into local storage, where it can be read using
standard Web Storage apis or other utilities, such as `useStorage`.

![Session expiration diagram](images/sessionExpiration.png)
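A minimal sketch of what such a hook could look like, assuming only the behavior described above (a single localStorage key argument, a `[value, setter]` return, and cross-tab sync over the `storage` event); the actual hook in `AppContainer` may differ:

```javascript
import { useState, useEffect, useCallback } from 'react';

// Illustrative sketch only: a localStorage-backed state hook matching the
// behavior described above.
export default function useStorage(key) {
  const [storageVal, setStorageVal] = useState(() =>
    window.localStorage.getItem(key)
  );

  // Re-read the key whenever any tab writes to localStorage, so state
  // stays in sync across browser tabs.
  useEffect(() => {
    const sync = () => setStorageVal(window.localStorage.getItem(key));
    window.addEventListener('storage', sync);
    return () => window.removeEventListener('storage', sync);
  }, [key]);

  const setValue = useCallback(
    val => {
      if (val === null) {
        window.localStorage.removeItem(key);
      } else {
        window.localStorage.setItem(key, val);
      }
      setStorageVal(val);
      // 'storage' does not fire in the tab that performed the write,
      // so dispatch it manually to notify listeners in this tab too.
      window.dispatchEvent(new Event('storage'));
    },
    [key]
  );

  return [storageVal, setValue];
}
```

A consumer could then do something like `const [sessionTimeout] = useStorage(SESSION_TIMEOUT_KEY);` to observe the timeout value that the API layer's response interceptor (shown later in this changeset) writes.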
BIN  awx/ui_next/docs/images/sessionExpiration.png  (new file, 70 KiB; binary file not shown)
BIN  awx/ui_next/docs/images/useStorage.png  (new file, 57 KiB; binary file not shown)
8406  awx/ui_next/package-lock.json  (generated; diff suppressed because it is too large)
@@ -7,11 +7,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@lingui/react": "^2.9.1",
|
||||
"@patternfly/patternfly": "4.59.1",
|
||||
"@patternfly/react-core": "4.75.2",
|
||||
"@patternfly/react-icons": "4.7.16",
|
||||
"@patternfly/patternfly": "4.70.2",
|
||||
"@patternfly/react-core": "4.84.3",
|
||||
"@patternfly/react-icons": "4.7.22",
|
||||
"@patternfly/react-table": "^4.19.15",
|
||||
"ansi-to-html": "^0.6.11",
|
||||
"axios": "^0.18.1",
|
||||
"axios": "^0.21.1",
|
||||
"codemirror": "^5.47.0",
|
||||
"d3": "^5.12.0",
|
||||
"dagre": "^0.8.4",
|
||||
@@ -30,6 +31,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/polyfill": "^7.8.7",
|
||||
"@cypress/instrument-cra": "^1.4.0",
|
||||
"@lingui/cli": "^2.9.2",
|
||||
"@lingui/macro": "^2.9.1",
|
||||
"@nteract/mockument": "^1.0.4",
|
||||
@@ -42,7 +44,7 @@
|
||||
"eslint-config-prettier": "^5.0.0",
|
||||
"eslint-import-resolver-webpack": "0.11.1",
|
||||
"eslint-plugin-import": "^2.14.0",
|
||||
"eslint-plugin-jsx-a11y": "^6.1.1",
|
||||
"eslint-plugin-jsx-a11y": "^6.4.1",
|
||||
"eslint-plugin-react": "^7.11.1",
|
||||
"eslint-plugin-react-hooks": "^2.2.0",
|
||||
"http-proxy-middleware": "^1.0.3",
|
||||
@@ -53,6 +55,7 @@
|
||||
},
|
||||
"scripts": {
|
||||
"start": "PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts start",
|
||||
"start-instrumented": "DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start",
|
||||
"build": "INLINE_RUNTIME_CHUNK=false react-scripts build",
|
||||
"test": "TZ='UTC' react-scripts test --coverage --watchAll=false",
|
||||
"test-watch": "TZ='UTC' react-scripts test",
|
||||
|
||||
@@ -30,7 +30,12 @@ const ProtectedRoute = ({ children, ...rest }) =>
|
||||
|
||||
function App() {
|
||||
const catalogs = { en, ja };
|
||||
const language = getLanguageWithoutRegionCode(navigator);
|
||||
let language = getLanguageWithoutRegionCode(navigator);
|
||||
if (!Object.keys(catalogs).includes(language)) {
|
||||
// If there isn't a string catalog available for the browser's
|
||||
// preferred language, default to one that has strings.
|
||||
language = 'en';
|
||||
}
|
||||
const match = useRouteMatch();
|
||||
const { hash, search, pathname } = useLocation();
|
||||
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
import axios from 'axios';
|
||||
|
||||
import { SESSION_TIMEOUT_KEY } from '../constants';
|
||||
import { encodeQueryString } from '../util/qs';
|
||||
import debounce from '../util/debounce';
|
||||
|
||||
const updateStorage = debounce((key, val) => {
|
||||
window.localStorage.setItem(key, val);
|
||||
window.dispatchEvent(new Event('storage'));
|
||||
}, 500);
|
||||
|
||||
const defaultHttp = axios.create({
|
||||
xsrfCookieName: 'csrftoken',
|
||||
@@ -10,6 +17,15 @@ const defaultHttp = axios.create({
|
||||
},
|
||||
});
|
||||
|
||||
defaultHttp.interceptors.response.use(response => {
|
||||
const timeout = response?.headers['session-timeout'];
|
||||
if (timeout) {
|
||||
const timeoutDate = new Date().getTime() + timeout * 1000;
|
||||
updateStorage(SESSION_TIMEOUT_KEY, String(timeoutDate));
|
||||
}
|
||||
return response;
|
||||
});
|
||||
|
||||
class Base {
|
||||
constructor(http = defaultHttp, baseURL) {
|
||||
this.http = http;
|
||||
|
||||
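The interceptor added above writes the computed timeout date to localStorage under `SESSION_TIMEOUT_KEY` and broadcasts a debounced `storage` event. A rough sketch of the consuming side, assuming the `useStorage` hook described in `APP_ARCHITECTURE.md` (the component name, import paths, and redirect target below are illustrative, not part of this changeset):

```javascript
import { useEffect } from 'react';
import { useHistory } from 'react-router-dom';

import { SESSION_TIMEOUT_KEY } from '../constants';
import useStorage from './useStorage'; // illustrative path

// Illustrative only: redirect to the login screen once the stored
// session timeout date (written by the interceptor above) has passed.
function SessionWatcher({ children }) {
  const history = useHistory();
  const [sessionTimeout] = useStorage(SESSION_TIMEOUT_KEY);

  useEffect(() => {
    if (!sessionTimeout) {
      return undefined;
    }
    const msRemaining = Number(sessionTimeout) - new Date().getTime();
    const timer = setTimeout(() => history.replace('/login'), msRemaining);
    return () => clearTimeout(timer);
  }, [sessionTimeout, history]);

  return children;
}

export default SessionWatcher;
```

Because the interceptor debounces its localStorage writes by 500 ms and dispatches the `storage` event manually, a watcher like this would see updated timeout dates from any tab within roughly half a second.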
@@ -1,3 +1,4 @@
|
||||
import ActivityStream from './models/ActivityStream';
|
||||
import AdHocCommands from './models/AdHocCommands';
|
||||
import Applications from './models/Applications';
|
||||
import Auth from './models/Auth';
|
||||
@@ -39,6 +40,7 @@ import WorkflowJobTemplateNodes from './models/WorkflowJobTemplateNodes';
|
||||
import WorkflowJobTemplates from './models/WorkflowJobTemplates';
|
||||
import WorkflowJobs from './models/WorkflowJobs';
|
||||
|
||||
const ActivityStreamAPI = new ActivityStream();
|
||||
const AdHocCommandsAPI = new AdHocCommands();
|
||||
const ApplicationsAPI = new Applications();
|
||||
const AuthAPI = new Auth();
|
||||
@@ -81,6 +83,7 @@ const WorkflowJobTemplatesAPI = new WorkflowJobTemplates();
|
||||
const WorkflowJobsAPI = new WorkflowJobs();
|
||||
|
||||
export {
|
||||
ActivityStreamAPI,
|
||||
AdHocCommandsAPI,
|
||||
ApplicationsAPI,
|
||||
AuthAPI,
|
||||
|
||||
10
awx/ui_next/src/api/models/ActivityStream.js
Normal file
10
awx/ui_next/src/api/models/ActivityStream.js
Normal file
@@ -0,0 +1,10 @@
|
||||
import Base from '../Base';
|
||||
|
||||
class ActivityStream extends Base {
|
||||
constructor(http) {
|
||||
super(http);
|
||||
this.baseUrl = '/api/v2/activity_stream/';
|
||||
}
|
||||
}
|
||||
|
||||
export default ActivityStream;
|
||||
@@ -36,6 +36,10 @@ class Jobs extends RelaunchMixin(Base) {
|
||||
return this.http.post(`/api/v2${getBaseURL(type)}${id}/cancel/`);
|
||||
}
|
||||
|
||||
readCredentials(id, type) {
|
||||
return this.http.get(`/api/v2${getBaseURL(type)}${id}/credentials/`);
|
||||
}
|
||||
|
||||
readDetail(id, type) {
|
||||
return this.http.get(`/api/v2${getBaseURL(type)}${id}/`);
|
||||
}
|
||||
|
||||
@@ -55,6 +55,19 @@ class WorkflowJobTemplateNodes extends Base {
|
||||
readCredentials(id) {
|
||||
return this.http.get(`${this.baseUrl}${id}/credentials/`);
|
||||
}
|
||||
|
||||
associateCredentials(id, credentialId) {
|
||||
return this.http.post(`${this.baseUrl}${id}/credentials/`, {
|
||||
id: credentialId,
|
||||
});
|
||||
}
|
||||
|
||||
disassociateCredentials(id, credentialId) {
|
||||
return this.http.post(`${this.baseUrl}${id}/credentials/`, {
|
||||
id: credentialId,
|
||||
disassociate: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default WorkflowJobTemplateNodes;
|
||||
|
||||
@@ -12,8 +12,8 @@ import {
|
||||
import { BrandName } from '../../variables';
|
||||
import brandLogoImg from './brand-logo.svg';
|
||||
|
||||
class About extends React.Component {
|
||||
static createSpeechBubble(version) {
|
||||
function About({ ansible_version, version, isOpen, onClose, i18n }) {
|
||||
const createSpeechBubble = () => {
|
||||
let text = `${BrandName} ${version}`;
|
||||
let top = '';
|
||||
let bottom = '';
|
||||
@@ -28,31 +28,22 @@ class About extends React.Component {
|
||||
bottom = ` --${bottom}-- `;
|
||||
|
||||
return top + text + bottom;
|
||||
}
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
const speechBubble = createSpeechBubble();
|
||||
|
||||
this.createSpeechBubble = this.constructor.createSpeechBubble.bind(this);
|
||||
}
|
||||
|
||||
render() {
|
||||
const { ansible_version, version, isOpen, onClose, i18n } = this.props;
|
||||
|
||||
const speechBubble = this.createSpeechBubble(version);
|
||||
|
||||
return (
|
||||
<AboutModal
|
||||
isOpen={isOpen}
|
||||
onClose={onClose}
|
||||
productName={`Ansible ${BrandName}`}
|
||||
trademark={i18n._(t`Copyright 2019 Red Hat, Inc.`)}
|
||||
brandImageSrc={brandLogoImg}
|
||||
brandImageAlt={i18n._(t`Brand Image`)}
|
||||
>
|
||||
<pre>
|
||||
{speechBubble}
|
||||
{`
|
||||
return (
|
||||
<AboutModal
|
||||
isOpen={isOpen}
|
||||
onClose={onClose}
|
||||
productName={`Ansible ${BrandName}`}
|
||||
trademark={i18n._(t`Copyright 2019 Red Hat, Inc.`)}
|
||||
brandImageSrc={brandLogoImg}
|
||||
brandImageAlt={i18n._(t`Brand Image`)}
|
||||
>
|
||||
<pre>
|
||||
{speechBubble}
|
||||
{`
|
||||
\\
|
||||
\\ ^__^
|
||||
(oo)\\_______
|
||||
@@ -60,18 +51,17 @@ class About extends React.Component {
|
||||
||----w |
|
||||
|| ||
|
||||
`}
|
||||
</pre>
|
||||
<TextContent>
|
||||
<TextList component="dl">
|
||||
<TextListItem component="dt">
|
||||
{i18n._(t`Ansible Version`)}
|
||||
</TextListItem>
|
||||
<TextListItem component="dd">{ansible_version}</TextListItem>
|
||||
</TextList>
|
||||
</TextContent>
|
||||
</AboutModal>
|
||||
);
|
||||
}
|
||||
</pre>
|
||||
<TextContent>
|
||||
<TextList component="dl">
|
||||
<TextListItem component="dt">
|
||||
{i18n._(t`Ansible Version`)}
|
||||
</TextListItem>
|
||||
<TextListItem component="dd">{ansible_version}</TextListItem>
|
||||
</TextList>
|
||||
</TextContent>
|
||||
</AboutModal>
|
||||
);
|
||||
}
|
||||
|
||||
About.propTypes = {
|
||||
|
||||
@@ -57,7 +57,7 @@ function AdHocCommands({ adHocItems, i18n, hasListItems }) {
|
||||
fetchData();
|
||||
}, [fetchData]);
|
||||
const {
|
||||
isloading: isLaunchLoading,
|
||||
isLoading: isLaunchLoading,
|
||||
error: launchError,
|
||||
request: launchAdHocCommands,
|
||||
} = useRequest(
|
||||
|
||||
@@ -58,7 +58,7 @@ function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
return <ContentError error={error} />;
|
||||
}
|
||||
if (isLoading) {
|
||||
return <ContentLoading error={error} />;
|
||||
return <ContentLoading />;
|
||||
}
|
||||
return (
|
||||
<Form>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { Fragment } from 'react';
|
||||
import React, { Fragment, useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
@@ -17,95 +17,57 @@ const readTeams = async queryParams => TeamsAPI.read(queryParams);
|
||||
|
||||
const readTeamsOptions = async () => TeamsAPI.readOptions();
|
||||
|
||||
class AddResourceRole extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
|
||||
this.state = {
|
||||
selectedResource: null,
|
||||
selectedResourceRows: [],
|
||||
selectedRoleRows: [],
|
||||
currentStepId: 1,
|
||||
maxEnabledStep: 1,
|
||||
};
|
||||
|
||||
this.handleResourceCheckboxClick = this.handleResourceCheckboxClick.bind(
|
||||
this
|
||||
);
|
||||
this.handleResourceSelect = this.handleResourceSelect.bind(this);
|
||||
this.handleRoleCheckboxClick = this.handleRoleCheckboxClick.bind(this);
|
||||
this.handleWizardNext = this.handleWizardNext.bind(this);
|
||||
this.handleWizardSave = this.handleWizardSave.bind(this);
|
||||
this.handleWizardGoToStep = this.handleWizardGoToStep.bind(this);
|
||||
}
|
||||
|
||||
handleResourceCheckboxClick(user) {
|
||||
const { selectedResourceRows, currentStepId } = this.state;
|
||||
function AddResourceRole({ onSave, onClose, roles, i18n, resource }) {
|
||||
const [selectedResource, setSelectedResource] = useState(null);
|
||||
const [selectedResourceRows, setSelectedResourceRows] = useState([]);
|
||||
const [selectedRoleRows, setSelectedRoleRows] = useState([]);
|
||||
const [currentStepId, setCurrentStepId] = useState(1);
|
||||
const [maxEnabledStep, setMaxEnabledStep] = useState(1);
|
||||
|
||||
const handleResourceCheckboxClick = user => {
|
||||
const selectedIndex = selectedResourceRows.findIndex(
|
||||
selectedRow => selectedRow.id === user.id
|
||||
);
|
||||
|
||||
if (selectedIndex > -1) {
|
||||
selectedResourceRows.splice(selectedIndex, 1);
|
||||
const stateToUpdate = { selectedResourceRows };
|
||||
if (selectedResourceRows.length === 0) {
|
||||
stateToUpdate.maxEnabledStep = currentStepId;
|
||||
setMaxEnabledStep(currentStepId);
|
||||
}
|
||||
this.setState(stateToUpdate);
|
||||
setSelectedRoleRows(selectedResourceRows);
|
||||
} else {
|
||||
this.setState(prevState => ({
|
||||
selectedResourceRows: [...prevState.selectedResourceRows, user],
|
||||
}));
|
||||
setSelectedResourceRows([...selectedResourceRows, user]);
|
||||
}
|
||||
}
|
||||
|
||||
handleRoleCheckboxClick(role) {
|
||||
const { selectedRoleRows } = this.state;
|
||||
};
|
||||
|
||||
const handleRoleCheckboxClick = role => {
|
||||
const selectedIndex = selectedRoleRows.findIndex(
|
||||
selectedRow => selectedRow.id === role.id
|
||||
);
|
||||
|
||||
if (selectedIndex > -1) {
|
||||
selectedRoleRows.splice(selectedIndex, 1);
|
||||
this.setState({ selectedRoleRows });
|
||||
setSelectedRoleRows(selectedRoleRows);
|
||||
} else {
|
||||
this.setState(prevState => ({
|
||||
selectedRoleRows: [...prevState.selectedRoleRows, role],
|
||||
}));
|
||||
setSelectedRoleRows([...selectedRoleRows, role]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
handleResourceSelect(resourceType) {
|
||||
this.setState({
|
||||
selectedResource: resourceType,
|
||||
selectedResourceRows: [],
|
||||
selectedRoleRows: [],
|
||||
});
|
||||
}
|
||||
const handleResourceSelect = resourceType => {
|
||||
setSelectedResource(resourceType);
|
||||
setSelectedResourceRows([]);
|
||||
setSelectedRoleRows([]);
|
||||
};
|
||||
|
||||
handleWizardNext(step) {
|
||||
this.setState({
|
||||
currentStepId: step.id,
|
||||
maxEnabledStep: step.id,
|
||||
});
|
||||
}
|
||||
const handleWizardNext = step => {
|
||||
setCurrentStepId(step.id);
|
||||
setMaxEnabledStep(step.id);
|
||||
};
|
||||
|
||||
handleWizardGoToStep(step) {
|
||||
this.setState({
|
||||
currentStepId: step.id,
|
||||
});
|
||||
}
|
||||
|
||||
async handleWizardSave() {
|
||||
const { onSave } = this.props;
|
||||
const {
|
||||
selectedResourceRows,
|
||||
selectedRoleRows,
|
||||
selectedResource,
|
||||
} = this.state;
|
||||
const handleWizardGoToStep = step => {
|
||||
setCurrentStepId(step.id);
|
||||
};
|
||||
|
||||
const handleWizardSave = async () => {
|
||||
try {
|
||||
const roleRequests = [];
|
||||
|
||||
@@ -134,205 +96,198 @@ class AddResourceRole extends React.Component {
|
||||
} catch (err) {
|
||||
// TODO: handle this error
|
||||
}
|
||||
};
|
||||
|
||||
// Object roles can be user only, so we remove them when
|
||||
// showing role choices for team access
|
||||
const selectableRoles = { ...roles };
|
||||
if (selectedResource === 'teams') {
|
||||
Object.keys(roles).forEach(key => {
|
||||
if (selectableRoles[key].user_only) {
|
||||
delete selectableRoles[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
render() {
|
||||
const {
|
||||
selectedResource,
|
||||
selectedResourceRows,
|
||||
selectedRoleRows,
|
||||
currentStepId,
|
||||
maxEnabledStep,
|
||||
} = this.state;
|
||||
const { onClose, roles, i18n } = this.props;
|
||||
const userSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
key: 'username__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
key: 'first_name__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
key: 'last_name__icontains',
|
||||
},
|
||||
];
|
||||
const userSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
key: 'username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
key: 'first_name',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
key: 'last_name',
|
||||
},
|
||||
];
|
||||
const teamSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
key: 'created_by__username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
key: 'modified_by__username',
|
||||
},
|
||||
];
|
||||
|
||||
// Object roles can be user only, so we remove them when
|
||||
// showing role choices for team access
|
||||
const selectableRoles = { ...roles };
|
||||
if (selectedResource === 'teams') {
|
||||
Object.keys(roles).forEach(key => {
|
||||
if (selectableRoles[key].user_only) {
|
||||
delete selectableRoles[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
const teamSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
},
|
||||
];
|
||||
|
||||
const userSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
key: 'username__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
key: 'first_name__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
key: 'last_name__icontains',
|
||||
},
|
||||
];
|
||||
let wizardTitle = '';
|
||||
|
||||
const userSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
key: 'username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
key: 'first_name',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
key: 'last_name',
|
||||
},
|
||||
];
|
||||
switch (selectedResource) {
|
||||
case 'users':
|
||||
wizardTitle = i18n._(t`Add User Roles`);
|
||||
break;
|
||||
case 'teams':
|
||||
wizardTitle = i18n._(t`Add Team Roles`);
|
||||
break;
|
||||
default:
|
||||
wizardTitle = i18n._(t`Add Roles`);
|
||||
}
|
||||
|
||||
const teamSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
key: 'created_by__username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
key: 'modified_by__username',
|
||||
},
|
||||
];
|
||||
|
||||
const teamSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
},
|
||||
];
|
||||
|
||||
let wizardTitle = '';
|
||||
|
||||
switch (selectedResource) {
|
||||
case 'users':
|
||||
wizardTitle = i18n._(t`Add User Roles`);
|
||||
break;
|
||||
case 'teams':
|
||||
wizardTitle = i18n._(t`Add Team Roles`);
|
||||
break;
|
||||
default:
|
||||
wizardTitle = i18n._(t`Add Roles`);
|
||||
}
|
||||
|
||||
const steps = [
|
||||
{
|
||||
id: 1,
|
||||
name: i18n._(t`Select a Resource Type`),
|
||||
component: (
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap' }}>
|
||||
<div style={{ width: '100%', marginBottom: '10px' }}>
|
||||
{i18n._(
|
||||
t`Choose the type of resource that will be receiving new roles. For example, if you'd like to add new roles to a set of users please choose Users and click Next. You'll be able to select the specific resources in the next step.`
|
||||
)}
|
||||
</div>
|
||||
<SelectableCard
|
||||
isSelected={selectedResource === 'users'}
|
||||
label={i18n._(t`Users`)}
|
||||
dataCy="add-role-users"
|
||||
onClick={() => this.handleResourceSelect('users')}
|
||||
/>
|
||||
const steps = [
|
||||
{
|
||||
id: 1,
|
||||
name: i18n._(t`Select a Resource Type`),
|
||||
component: (
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap' }}>
|
||||
<div style={{ width: '100%', marginBottom: '10px' }}>
|
||||
{i18n._(
|
||||
t`Choose the type of resource that will be receiving new roles. For example, if you'd like to add new roles to a set of users please choose Users and click Next. You'll be able to select the specific resources in the next step.`
|
||||
)}
|
||||
</div>
|
||||
<SelectableCard
|
||||
isSelected={selectedResource === 'users'}
|
||||
label={i18n._(t`Users`)}
|
||||
ariaLabel={i18n._(t`Users`)}
|
||||
dataCy="add-role-users"
|
||||
onClick={() => handleResourceSelect('users')}
|
||||
/>
|
||||
{resource?.type === 'credential' && !resource?.organization ? null : (
|
||||
<SelectableCard
|
||||
isSelected={selectedResource === 'teams'}
|
||||
label={i18n._(t`Teams`)}
|
||||
ariaLabel={i18n._(t`Teams`)}
|
||||
dataCy="add-role-teams"
|
||||
onClick={() => this.handleResourceSelect('teams')}
|
||||
onClick={() => handleResourceSelect('teams')}
|
||||
/>
|
||||
</div>
|
||||
),
|
||||
enableNext: selectedResource !== null,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: i18n._(t`Select Items from List`),
|
||||
component: (
|
||||
<Fragment>
|
||||
{selectedResource === 'users' && (
|
||||
<SelectResourceStep
|
||||
searchColumns={userSearchColumns}
|
||||
sortColumns={userSortColumns}
|
||||
displayKey="username"
|
||||
onRowClick={this.handleResourceCheckboxClick}
|
||||
fetchItems={readUsers}
|
||||
fetchOptions={readUsersOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
sortedColumnKey="username"
|
||||
/>
|
||||
)}
|
||||
{selectedResource === 'teams' && (
|
||||
<SelectResourceStep
|
||||
searchColumns={teamSearchColumns}
|
||||
sortColumns={teamSortColumns}
|
||||
onRowClick={this.handleResourceCheckboxClick}
|
||||
fetchItems={readTeams}
|
||||
fetchOptions={readTeamsOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
</Fragment>
|
||||
),
|
||||
enableNext: selectedResourceRows.length > 0,
|
||||
canJumpTo: maxEnabledStep >= 2,
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: i18n._(t`Select Roles to Apply`),
|
||||
component: (
|
||||
<SelectRoleStep
|
||||
onRolesClick={this.handleRoleCheckboxClick}
|
||||
roles={selectableRoles}
|
||||
selectedListKey={selectedResource === 'users' ? 'username' : 'name'}
|
||||
selectedListLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
selectedRoleRows={selectedRoleRows}
|
||||
/>
|
||||
),
|
||||
nextButtonText: i18n._(t`Save`),
|
||||
enableNext: selectedRoleRows.length > 0,
|
||||
canJumpTo: maxEnabledStep >= 3,
|
||||
},
|
||||
];
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
enableNext: selectedResource !== null,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: i18n._(t`Select Items from List`),
|
||||
component: (
|
||||
<Fragment>
|
||||
{selectedResource === 'users' && (
|
||||
<SelectResourceStep
|
||||
searchColumns={userSearchColumns}
|
||||
sortColumns={userSortColumns}
|
||||
displayKey="username"
|
||||
onRowClick={handleResourceCheckboxClick}
|
||||
fetchItems={readUsers}
|
||||
fetchOptions={readUsersOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
sortedColumnKey="username"
|
||||
/>
|
||||
)}
|
||||
{selectedResource === 'teams' && (
|
||||
<SelectResourceStep
|
||||
searchColumns={teamSearchColumns}
|
||||
sortColumns={teamSortColumns}
|
||||
onRowClick={handleResourceCheckboxClick}
|
||||
fetchItems={readTeams}
|
||||
fetchOptions={readTeamsOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
</Fragment>
|
||||
),
|
||||
enableNext: selectedResourceRows.length > 0,
|
||||
canJumpTo: maxEnabledStep >= 2,
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: i18n._(t`Select Roles to Apply`),
|
||||
component: (
|
||||
<SelectRoleStep
|
||||
onRolesClick={handleRoleCheckboxClick}
|
||||
roles={selectableRoles}
|
||||
selectedListKey={selectedResource === 'users' ? 'username' : 'name'}
|
||||
selectedListLabel={i18n._(t`Selected`)}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
selectedRoleRows={selectedRoleRows}
|
||||
/>
|
||||
),
|
||||
nextButtonText: i18n._(t`Save`),
|
||||
enableNext: selectedRoleRows.length > 0,
|
||||
canJumpTo: maxEnabledStep >= 3,
|
||||
},
|
||||
];
|
||||
|
||||
const currentStep = steps.find(step => step.id === currentStepId);
|
||||
const currentStep = steps.find(step => step.id === currentStepId);
|
||||
|
||||
// TODO: somehow internationalize steps and currentStep.nextButtonText
|
||||
return (
|
||||
<Wizard
|
||||
style={{ overflow: 'scroll' }}
|
||||
isOpen
|
||||
onNext={this.handleWizardNext}
|
||||
onClose={onClose}
|
||||
onSave={this.handleWizardSave}
|
||||
onGoToStep={this.handleWizardGoToStep}
|
||||
steps={steps}
|
||||
title={wizardTitle}
|
||||
nextButtonText={currentStep.nextButtonText || undefined}
|
||||
backButtonText={i18n._(t`Back`)}
|
||||
cancelButtonText={i18n._(t`Cancel`)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
// TODO: somehow internationalize steps and currentStep.nextButtonText
|
||||
return (
|
||||
<Wizard
|
||||
style={{ overflow: 'scroll' }}
|
||||
isOpen
|
||||
onNext={handleWizardNext}
|
||||
onClose={onClose}
|
||||
onSave={handleWizardSave}
|
||||
onGoToStep={step => handleWizardGoToStep(step)}
|
||||
steps={steps}
|
||||
title={wizardTitle}
|
||||
nextButtonText={currentStep.nextButtonText || undefined}
|
||||
backButtonText={i18n._(t`Back`)}
|
||||
cancelButtonText={i18n._(t`Cancel`)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
AddResourceRole.propTypes = {
|
||||
onClose: PropTypes.func.isRequired,
|
||||
onSave: PropTypes.func.isRequired,
|
||||
roles: PropTypes.shape(),
|
||||
resource: PropTypes.shape(),
|
||||
};
|
||||
|
||||
AddResourceRole.defaultProps = {
|
||||
roles: {},
|
||||
resource: {},
|
||||
};
|
||||
|
||||
export { AddResourceRole as _AddResourceRole };
|
||||
|
||||
@@ -1,22 +1,46 @@
|
||||
/* eslint-disable react/jsx-pascal-case */
|
||||
import React from 'react';
|
||||
import { shallow } from 'enzyme';
|
||||
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
|
||||
import {
|
||||
mountWithContexts,
|
||||
waitForElement,
|
||||
} from '../../../testUtils/enzymeHelpers';
|
||||
import AddResourceRole, { _AddResourceRole } from './AddResourceRole';
|
||||
import { TeamsAPI, UsersAPI } from '../../api';
|
||||
|
||||
jest.mock('../../api');
|
||||
jest.mock('../../api/models/Teams');
|
||||
jest.mock('../../api/models/Users');
|
||||
|
||||
// TODO: Once error handling is functional in
|
||||
// this component write tests for it
|
||||
|
||||
describe('<_AddResourceRole />', () => {
|
||||
UsersAPI.read.mockResolvedValue({
|
||||
data: {
|
||||
count: 2,
|
||||
results: [
|
||||
{ id: 1, username: 'foo' },
|
||||
{ id: 2, username: 'bar' },
|
||||
{ id: 1, username: 'foo', url: '' },
|
||||
{ id: 2, username: 'bar', url: '' },
|
||||
],
|
||||
},
|
||||
});
|
||||
UsersAPI.readOptions.mockResolvedValue({
|
||||
data: { related: {}, actions: { GET: {} } },
|
||||
});
|
||||
TeamsAPI.read.mockResolvedValue({
|
||||
data: {
|
||||
count: 2,
|
||||
results: [
|
||||
{ id: 1, name: 'Team foo', url: '' },
|
||||
{ id: 2, name: 'Team bar', url: '' },
|
||||
],
|
||||
},
|
||||
});
|
||||
TeamsAPI.readOptions.mockResolvedValue({
|
||||
data: { related: {}, actions: { GET: {} } },
|
||||
});
|
||||
const roles = {
|
||||
admin_role: {
|
||||
description: 'Can manage all aspects of the organization',
|
||||
@@ -39,186 +63,180 @@ describe('<_AddResourceRole />', () => {
|
||||
/>
|
||||
);
|
||||
});
|
||||
test('handleRoleCheckboxClick properly updates state', () => {
|
||||
const wrapper = shallow(
|
||||
<_AddResourceRole
|
||||
onClose={() => {}}
|
||||
onSave={() => {}}
|
||||
roles={roles}
|
||||
i18n={{ _: val => val.toString() }}
|
||||
/>
|
||||
);
|
||||
wrapper.setState({
|
||||
selectedRoleRows: [
|
||||
{
|
||||
description: 'Can manage all aspects of the organization',
|
||||
name: 'Admin',
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
test('should save properly', async () => {
|
||||
let wrapper;
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<AddResourceRole onClose={() => {}} onSave={() => {}} roles={roles} />,
|
||||
{ context: { network: { handleHttpError: () => {} } } }
|
||||
);
|
||||
});
|
||||
wrapper.instance().handleRoleCheckboxClick({
|
||||
description: 'Can manage all aspects of the organization',
|
||||
name: 'Admin',
|
||||
id: 1,
|
||||
});
|
||||
expect(wrapper.state('selectedRoleRows')).toEqual([]);
|
||||
wrapper.instance().handleRoleCheckboxClick({
|
||||
description: 'Can manage all aspects of the organization',
|
||||
name: 'Admin',
|
||||
id: 1,
|
||||
});
|
||||
expect(wrapper.state('selectedRoleRows')).toEqual([
|
||||
{
|
||||
description: 'Can manage all aspects of the organization',
|
||||
name: 'Admin',
|
||||
id: 1,
|
||||
},
|
||||
]);
|
||||
});
|
||||
test('handleResourceCheckboxClick properly updates state', () => {
|
||||
const wrapper = shallow(
|
||||
<_AddResourceRole
|
||||
onClose={() => {}}
|
||||
onSave={() => {}}
|
||||
roles={roles}
|
||||
i18n={{ _: val => val.toString() }}
|
||||
/>
|
||||
);
|
||||
wrapper.setState({
|
||||
selectedResourceRows: [
|
||||
{
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
},
|
||||
],
|
||||
});
|
||||
wrapper.instance().handleResourceCheckboxClick({
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
});
|
||||
expect(wrapper.state('selectedResourceRows')).toEqual([]);
|
||||
wrapper.instance().handleResourceCheckboxClick({
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
});
|
||||
expect(wrapper.state('selectedResourceRows')).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
},
|
||||
]);
|
||||
});
|
||||
test('clicking user/team cards updates state', () => {
|
||||
const spy = jest.spyOn(_AddResourceRole.prototype, 'handleResourceSelect');
|
||||
const wrapper = mountWithContexts(
|
||||
<AddResourceRole onClose={() => {}} onSave={() => {}} roles={roles} />,
|
||||
{ context: { network: { handleHttpError: () => {} } } }
|
||||
).find('AddResourceRole');
|
||||
wrapper.update();
|
||||
|
||||
// Step 1
|
||||
const selectableCardWrapper = wrapper.find('SelectableCard');
|
||||
expect(selectableCardWrapper.length).toBe(2);
|
||||
selectableCardWrapper.first().simulate('click');
|
||||
expect(spy).toHaveBeenCalledWith('users');
|
||||
expect(wrapper.state('selectedResource')).toBe('users');
|
||||
selectableCardWrapper.at(1).simulate('click');
|
||||
expect(spy).toHaveBeenCalledWith('teams');
|
||||
expect(wrapper.state('selectedResource')).toBe('teams');
|
||||
act(() => wrapper.find('SelectableCard[label="Users"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
await act(async () =>
|
||||
wrapper.find('Button[type="submit"]').prop('onClick')()
|
||||
);
|
||||
wrapper.update();
|
||||
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
// Step 3
|
||||
act(() =>
|
||||
wrapper.find('Checkbox[aria-label="Admin"]').invoke('onChange')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('Checkbox[aria-label="Admin"]').prop('isChecked')).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Save
|
||||
await act(async () =>
|
||||
wrapper.find('Button[type="submit"]').prop('onClick')()
|
||||
);
|
||||
expect(UsersAPI.associateRole).toBeCalledWith(1, 1);
|
||||
});
|
||||
test('handleResourceSelect clears out selected lists and sets selectedResource', () => {
|
||||
const wrapper = shallow(
|
||||
<_AddResourceRole
|
||||
|
||||
test('should successfuly click user/team cards', async () => {
|
||||
let wrapper;
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<AddResourceRole onClose={() => {}} onSave={() => {}} roles={roles} />,
|
||||
{ context: { network: { handleHttpError: () => {} } } }
|
||||
);
|
||||
});
|
||||
wrapper.update();
|
||||
|
||||
const selectableCardWrapper = wrapper.find('SelectableCard');
|
||||
expect(selectableCardWrapper.length).toBe(2);
|
||||
act(() => wrapper.find('SelectableCard[label="Users"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'SelectableCard[label="Users"]',
|
||||
el => el.prop('isSelected') === true
|
||||
);
|
||||
act(() => wrapper.find('SelectableCard[label="Teams"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'SelectableCard[label="Teams"]',
|
||||
el => el.prop('isSelected') === true
|
||||
);
|
||||
});
|
||||
|
||||
test('should reset values with resource type changes', async () => {
|
||||
let wrapper;
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<AddResourceRole onClose={() => {}} onSave={() => {}} roles={roles} />,
|
||||
{ context: { network: { handleHttpError: () => {} } } }
|
||||
);
|
||||
});
|
||||
wrapper.update();
|
||||
|
||||
// Step 1
|
||||
const selectableCardWrapper = wrapper.find('SelectableCard');
|
||||
expect(selectableCardWrapper.length).toBe(2);
|
||||
act(() => wrapper.find('SelectableCard[label="Users"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
await act(async () =>
|
||||
wrapper.find('Button[type="submit"]').prop('onClick')()
|
||||
);
|
||||
wrapper.update();
|
||||
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
// Step 3
|
||||
act(() =>
|
||||
wrapper.find('Checkbox[aria-label="Admin"]').invoke('onChange')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('Checkbox[aria-label="Admin"]').prop('isChecked')).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Go back to step 1
|
||||
act(() => {
|
||||
wrapper
|
||||
.find('WizardNavItem[content="Select a Resource Type"]')
|
||||
.find('button')
|
||||
.prop('onClick')({ id: 1 });
|
||||
});
|
||||
wrapper.update();
|
||||
expect(
|
||||
wrapper
|
||||
.find('WizardNavItem[content="Select a Resource Type"]')
|
||||
.prop('isCurrent')
|
||||
).toBe(true);
|
||||
|
||||
// Go back to step 1 and this time select teams. Doing so should clear following steps
|
||||
act(() => wrapper.find('SelectableCard[label="Teams"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
await act(async () =>
|
||||
wrapper.find('Button[type="submit"]').prop('onClick')()
|
||||
);
|
||||
wrapper.update();
|
||||
|
||||
// Make sure no teams have been selected
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
wrapper
|
||||
.find('DataListCheck')
|
||||
.map(item => expect(item.prop('checked')).toBe(false));
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
// Make sure that no roles have been selected
|
||||
wrapper
|
||||
.find('Checkbox')
|
||||
.map(card => expect(card.prop('isChecked')).toBe(false));
|
||||
|
||||
// Make sure the save button is disabled
|
||||
expect(wrapper.find('Button[type="submit"]').prop('isDisabled')).toBe(true);
|
||||
});
|
||||
|
||||
test('should not display team as a choice in case credential does not have organization', () => {
|
||||
const wrapper = mountWithContexts(
|
||||
<AddResourceRole
|
||||
onClose={() => {}}
|
||||
onSave={() => {}}
|
||||
roles={roles}
|
||||
i18n={{ _: val => val.toString() }}
|
||||
/>
|
||||
);
|
||||
wrapper.setState({
|
||||
selectedResource: 'teams',
|
||||
selectedResourceRows: [
|
||||
{
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
},
|
||||
],
|
||||
selectedRoleRows: [
|
||||
{
|
||||
description: 'Can manage all aspects of the organization',
|
||||
id: 1,
|
||||
name: 'Admin',
|
||||
},
|
||||
],
|
||||
});
|
||||
wrapper.instance().handleResourceSelect('users');
|
||||
expect(wrapper.state()).toEqual({
|
||||
selectedResource: 'users',
|
||||
selectedResourceRows: [],
|
||||
selectedRoleRows: [],
|
||||
currentStepId: 1,
|
||||
maxEnabledStep: 1,
|
||||
});
|
||||
wrapper.instance().handleResourceSelect('teams');
|
||||
expect(wrapper.state()).toEqual({
|
||||
selectedResource: 'teams',
|
||||
selectedResourceRows: [],
|
||||
selectedRoleRows: [],
|
||||
currentStepId: 1,
|
||||
maxEnabledStep: 1,
|
||||
});
|
||||
});
|
||||
test('handleWizardSave makes correct api calls, calls onSave when done', async () => {
|
||||
const handleSave = jest.fn();
|
||||
const wrapper = mountWithContexts(
|
||||
<AddResourceRole onClose={() => {}} onSave={handleSave} roles={roles} />,
|
||||
resource={{ type: 'credential', organization: null }}
|
||||
/>,
|
||||
{ context: { network: { handleHttpError: () => {} } } }
|
||||
).find('AddResourceRole');
|
||||
wrapper.setState({
|
||||
selectedResource: 'users',
|
||||
selectedResourceRows: [
|
||||
{
|
||||
id: 1,
|
||||
username: 'foobar',
|
||||
},
|
||||
],
|
||||
selectedRoleRows: [
|
||||
{
|
||||
description: 'Can manage all aspects of the organization',
|
||||
id: 1,
|
||||
name: 'Admin',
|
||||
},
|
||||
{
|
||||
description: 'May run any executable resources in the organization',
|
||||
id: 2,
|
||||
name: 'Execute',
|
||||
},
|
||||
],
|
||||
});
|
||||
await wrapper.instance().handleWizardSave();
|
||||
expect(UsersAPI.associateRole).toHaveBeenCalledTimes(2);
|
||||
expect(handleSave).toHaveBeenCalled();
|
||||
wrapper.setState({
|
||||
selectedResource: 'teams',
|
||||
selectedResourceRows: [
|
||||
{
|
||||
id: 1,
|
||||
name: 'foobar',
|
||||
},
|
||||
],
|
||||
selectedRoleRows: [
|
||||
{
|
||||
description: 'Can manage all aspects of the organization',
|
||||
id: 1,
|
||||
name: 'Admin',
|
||||
},
|
||||
{
|
||||
description: 'May run any executable resources in the organization',
|
||||
id: 2,
|
||||
name: 'Execute',
|
||||
},
|
||||
],
|
||||
});
|
||||
await wrapper.instance().handleWizardSave();
|
||||
expect(TeamsAPI.associateRole).toHaveBeenCalledTimes(2);
|
||||
expect(handleSave).toHaveBeenCalled();
|
||||
);
|
||||
|
||||
expect(wrapper.find('SelectableCard').length).toBe(1);
|
||||
wrapper.find('SelectableCard[label="Users"]').simulate('click');
|
||||
wrapper.update();
|
||||
expect(
|
||||
wrapper.find('SelectableCard[label="Users"]').prop('isSelected')
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,59 +7,55 @@ import { t } from '@lingui/macro';
|
||||
import CheckboxCard from './CheckboxCard';
|
||||
import SelectedList from '../SelectedList';
|
||||
|
||||
class RolesStep extends React.Component {
|
||||
render() {
|
||||
const {
|
||||
onRolesClick,
|
||||
roles,
|
||||
selectedListKey,
|
||||
selectedListLabel,
|
||||
selectedResourceRows,
|
||||
selectedRoleRows,
|
||||
i18n,
|
||||
} = this.props;
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
<div>
|
||||
{i18n._(
|
||||
t`Choose roles to apply to the selected resources. Note that all selected roles will be applied to all selected resources.`
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
{selectedResourceRows.length > 0 && (
|
||||
<SelectedList
|
||||
displayKey={selectedListKey}
|
||||
isReadOnly
|
||||
label={selectedListLabel || i18n._(t`Selected`)}
|
||||
selected={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div
|
||||
style={{
|
||||
display: 'grid',
|
||||
gridTemplateColumns: '1fr 1fr',
|
||||
gap: '20px 20px',
|
||||
marginTop: '20px',
|
||||
}}
|
||||
>
|
||||
{Object.keys(roles).map(role => (
|
||||
<CheckboxCard
|
||||
description={roles[role].description}
|
||||
itemId={roles[role].id}
|
||||
isSelected={selectedRoleRows.some(
|
||||
item => item.id === roles[role].id
|
||||
)}
|
||||
key={roles[role].id}
|
||||
name={roles[role].name}
|
||||
onSelect={() => onRolesClick(roles[role])}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
function RolesStep({
|
||||
onRolesClick,
|
||||
roles,
|
||||
selectedListKey,
|
||||
selectedListLabel,
|
||||
selectedResourceRows,
|
||||
selectedRoleRows,
|
||||
i18n,
|
||||
}) {
|
||||
return (
|
||||
<Fragment>
|
||||
<div>
|
||||
{i18n._(
|
||||
t`Choose roles to apply to the selected resources. Note that all selected roles will be applied to all selected resources.`
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
{selectedResourceRows.length > 0 && (
|
||||
<SelectedList
|
||||
displayKey={selectedListKey}
|
||||
isReadOnly
|
||||
label={selectedListLabel || i18n._(t`Selected`)}
|
||||
selected={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<div
|
||||
style={{
|
||||
display: 'grid',
|
||||
gridTemplateColumns: '1fr 1fr',
|
||||
gap: '20px 20px',
|
||||
marginTop: '20px',
|
||||
}}
|
||||
>
|
||||
{Object.keys(roles).map(role => (
|
||||
<CheckboxCard
|
||||
description={roles[role].description}
|
||||
itemId={roles[role].id}
|
||||
isSelected={selectedRoleRows.some(
|
||||
item => item.id === roles[role].id
|
||||
)}
|
||||
key={roles[role].id}
|
||||
name={roles[role].name}
|
||||
onSelect={() => onRolesClick(roles[role])}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
RolesStep.propTypes = {
|
||||
|
||||
@@ -12,52 +12,44 @@ import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { FormSelect, FormSelectOption } from '@patternfly/react-core';
|
||||
|
||||
class AnsibleSelect extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
this.onSelectChange = this.onSelectChange.bind(this);
|
||||
}
|
||||
|
||||
onSelectChange(val, event) {
|
||||
const { onChange, name } = this.props;
|
||||
function AnsibleSelect({
|
||||
id,
|
||||
data,
|
||||
i18n,
|
||||
isValid,
|
||||
onBlur,
|
||||
value,
|
||||
className,
|
||||
isDisabled,
|
||||
onChange,
|
||||
name,
|
||||
}) {
|
||||
const onSelectChange = (val, event) => {
|
||||
event.target.name = name;
|
||||
onChange(event, val);
|
||||
}
|
||||
};
|
||||
|
||||
render() {
|
||||
const {
|
||||
id,
|
||||
data,
|
||||
i18n,
|
||||
isValid,
|
||||
onBlur,
|
||||
value,
|
||||
className,
|
||||
isDisabled,
|
||||
} = this.props;
|
||||
|
||||
return (
|
||||
<FormSelect
|
||||
id={id}
|
||||
value={value}
|
||||
onChange={this.onSelectChange}
|
||||
onBlur={onBlur}
|
||||
aria-label={i18n._(t`Select Input`)}
|
||||
validated={isValid ? 'default' : 'error'}
|
||||
className={className}
|
||||
isDisabled={isDisabled}
|
||||
>
|
||||
{data.map(option => (
|
||||
<FormSelectOption
|
||||
key={option.key}
|
||||
value={option.value}
|
||||
label={option.label}
|
||||
isDisabled={option.isDisabled}
|
||||
/>
|
||||
))}
|
||||
</FormSelect>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<FormSelect
|
||||
id={id}
|
||||
value={value}
|
||||
onChange={onSelectChange}
|
||||
onBlur={onBlur}
|
||||
aria-label={i18n._(t`Select Input`)}
|
||||
validated={isValid ? 'default' : 'error'}
|
||||
className={className}
|
||||
isDisabled={isDisabled}
|
||||
>
|
||||
{data.map(option => (
|
||||
<FormSelectOption
|
||||
key={option.key}
|
||||
value={option.value}
|
||||
label={option.label}
|
||||
isDisabled={option.isDisabled}
|
||||
/>
|
||||
))}
|
||||
</FormSelect>
|
||||
);
|
||||
}
|
||||
|
||||
const Option = shape({
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
import React from 'react';
|
||||
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
import AnsibleSelect, { _AnsibleSelect } from './AnsibleSelect';
|
||||
import AnsibleSelect from './AnsibleSelect';
|
||||
|
||||
const mockData = [
|
||||
{
|
||||
key: 'baz',
|
||||
label: 'Baz',
|
||||
value: '/venv/baz/',
|
||||
value: '/var/lib/awx/venv/baz/',
|
||||
},
|
||||
{
|
||||
key: 'default',
|
||||
label: 'Default',
|
||||
value: '/venv/ansible/',
|
||||
value: '/var/lib/awx/venv/ansible/',
|
||||
},
|
||||
];
|
||||
|
||||
describe('<AnsibleSelect />', () => {
|
||||
const onChange = jest.fn();
|
||||
test('initially renders succesfully', async () => {
|
||||
mountWithContexts(
|
||||
<AnsibleSelect
|
||||
@@ -29,19 +30,18 @@ describe('<AnsibleSelect />', () => {
|
||||
});
|
||||
|
||||
test('calls "onSelectChange" on dropdown select change', () => {
|
||||
const spy = jest.spyOn(_AnsibleSelect.prototype, 'onSelectChange');
|
||||
const wrapper = mountWithContexts(
|
||||
<AnsibleSelect
|
||||
id="bar"
|
||||
value="foo"
|
||||
name="bar"
|
||||
onChange={() => {}}
|
||||
onChange={onChange}
|
||||
data={mockData}
|
||||
/>
|
||||
);
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(onChange).not.toHaveBeenCalled();
|
||||
wrapper.find('select').simulate('change');
|
||||
expect(spy).toHaveBeenCalled();
|
||||
expect(onChange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('Returns correct select options', () => {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React, { useEffect, useState, useCallback } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import { useHistory, useLocation, withRouter } from 'react-router-dom';
|
||||
import {
|
||||
Button,
|
||||
Nav,
|
||||
NavList,
|
||||
Page,
|
||||
@@ -13,6 +14,8 @@ import styled from 'styled-components';
|
||||
|
||||
import { ConfigAPI, MeAPI, RootAPI } from '../../api';
|
||||
import { ConfigProvider } from '../../contexts/Config';
|
||||
import { SESSION_TIMEOUT_KEY } from '../../constants';
|
||||
import { isAuthenticated } from '../../util/auth';
|
||||
import About from '../About';
|
||||
import AlertModal from '../AlertModal';
|
||||
import ErrorDetail from '../ErrorDetail';
|
||||
@@ -20,6 +23,17 @@ import BrandLogo from './BrandLogo';
|
||||
import NavExpandableGroup from './NavExpandableGroup';
|
||||
import PageHeaderToolbar from './PageHeaderToolbar';
|
||||
|
||||
// The maximum supported timeout for setTimeout(), in milliseconds,
|
||||
// is the highest number you can represent as a signed 32bit
|
||||
// integer (approximately 25 days)
|
||||
const MAX_TIMEOUT = 2 ** (32 - 1) - 1;
|
||||
|
||||
// The number of seconds the session timeout warning is displayed
|
||||
// before the user is logged out. Increasing this number (up to
|
||||
// the total session time, which is 1800s by default) will cause
|
||||
// the session timeout warning to display sooner.
|
||||
const SESSION_WARNING_DURATION = 10;
|
||||
|
||||
const PageHeader = styled(PFPageHeader)`
|
||||
& .pf-c-page__header-brand-link {
|
||||
color: inherit;
|
||||
@@ -30,6 +44,45 @@ const PageHeader = styled(PFPageHeader)`
|
||||
}
|
||||
`;
|
||||
|
||||
/**
|
||||
* The useStorage hook integrates with the browser's localStorage api.
|
||||
* It accepts a storage key as its only argument and returns a state
|
||||
* variable and setter function for that state variable.
|
||||
*
|
||||
* This utility behaves much like the standard useState hook with some
|
||||
* key differences:
|
||||
* 1. You don't pass it an initial value. Instead, the provided key
|
||||
* is used to retrieve the initial value from local storage. If
|
||||
* the key doesn't exist in local storage, null is returned.
|
||||
* 2. Behind the scenes, this hook registers an event listener with
|
||||
* the Web Storage api to establish a two-way binding between the
|
||||
* state variable and its corresponding local storage value. This
|
||||
* means that updates to the state variable with the setter
|
||||
* function will produce a corresponding update to the local
|
||||
* storage value and vice-versa.
|
||||
* 3. When local storage is shared across browser tabs, the data
|
||||
* binding is also shared across browser tabs. This means that
|
||||
* updates to the state variable using the setter function on
|
||||
* one tab will also update the state variable on any other tab
|
||||
* using this hook with the same key and vice-versa.
|
||||
*/
|
||||
function useStorage(key) {
|
||||
const [storageVal, setStorageVal] = useState(
|
||||
window.localStorage.getItem(key)
|
||||
);
|
||||
window.addEventListener('storage', () => {
|
||||
const newVal = window.localStorage.getItem(key);
|
||||
if (newVal !== storageVal) {
|
||||
setStorageVal(newVal);
|
||||
}
|
||||
});
|
||||
const setValue = val => {
|
||||
window.localStorage.setItem(key, val);
|
||||
setStorageVal(val);
|
||||
};
|
||||
return [storageVal, setValue];
|
||||
}
|
||||
|
||||
function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
const history = useHistory();
|
||||
const { pathname } = useLocation();
|
||||
@@ -38,14 +91,51 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
const [isAboutModalOpen, setIsAboutModalOpen] = useState(false);
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
|
||||
const sessionTimeoutId = useRef();
|
||||
const sessionIntervalId = useRef();
|
||||
const [sessionTimeout, setSessionTimeout] = useStorage(SESSION_TIMEOUT_KEY);
|
||||
const [timeoutWarning, setTimeoutWarning] = useState(false);
|
||||
const [timeRemaining, setTimeRemaining] = useState(null);
|
||||
|
||||
const handleAboutModalOpen = () => setIsAboutModalOpen(true);
|
||||
const handleAboutModalClose = () => setIsAboutModalOpen(false);
|
||||
const handleConfigErrorClose = () => setConfigError(null);
|
||||
const handleSessionTimeout = () => setTimeoutWarning(true);
|
||||
|
||||
const handleLogout = useCallback(async () => {
|
||||
await RootAPI.logout();
|
||||
history.replace('/login');
|
||||
}, [history]);
|
||||
setSessionTimeout(null);
|
||||
}, [setSessionTimeout]);
|
||||
|
||||
const handleSessionContinue = () => {
|
||||
MeAPI.read();
|
||||
setTimeoutWarning(false);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isAuthenticated(document.cookie)) history.replace('/login');
|
||||
const calcRemaining = () =>
|
||||
parseInt(sessionTimeout, 10) - new Date().getTime();
|
||||
const updateRemaining = () => setTimeRemaining(calcRemaining());
|
||||
setTimeoutWarning(false);
|
||||
clearTimeout(sessionTimeoutId.current);
|
||||
clearInterval(sessionIntervalId.current);
|
||||
sessionTimeoutId.current = setTimeout(
|
||||
handleSessionTimeout,
|
||||
Math.min(calcRemaining() - SESSION_WARNING_DURATION * 1000, MAX_TIMEOUT)
|
||||
);
|
||||
sessionIntervalId.current = setInterval(updateRemaining, 1000);
|
||||
return () => {
|
||||
clearTimeout(sessionTimeoutId.current);
|
||||
clearInterval(sessionIntervalId.current);
|
||||
};
|
||||
}, [history, sessionTimeout]);
|
||||
|
||||
useEffect(() => {
|
||||
if (timeRemaining !== null && timeRemaining <= 1) {
|
||||
handleLogout();
|
||||
}
|
||||
}, [handleLogout, timeRemaining]);
|
||||
|
||||
useEffect(() => {
|
||||
const loadConfig = async () => {
|
||||
@@ -128,6 +218,31 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
{i18n._(t`Failed to retrieve configuration.`)}
|
||||
<ErrorDetail error={configError} />
|
||||
</AlertModal>
|
||||
<AlertModal
|
||||
title={i18n._(t`Your session is about to expire`)}
|
||||
isOpen={timeoutWarning && sessionTimeout > 0 && timeRemaining !== null}
|
||||
onClose={handleLogout}
|
||||
showClose={false}
|
||||
variant="warning"
|
||||
actions={[
|
||||
<Button
|
||||
key="confirm"
|
||||
variant="primary"
|
||||
onClick={handleSessionContinue}
|
||||
>
|
||||
{i18n._(t`Continue`)}
|
||||
</Button>,
|
||||
<Button key="logout" variant="secondary" onClick={handleLogout}>
|
||||
{i18n._(t`Logout`)}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
{i18n._(
|
||||
t`You will be logged out in ${Number(
|
||||
Math.max(Math.floor(timeRemaining / 1000), 0)
|
||||
)} seconds due to inactivity.`
|
||||
)}
|
||||
</AlertModal>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { Component } from 'react';
|
||||
import React, { useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
@@ -17,129 +17,100 @@ import { QuestionCircleIcon, UserIcon } from '@patternfly/react-icons';
|
||||
const DOCLINK =
|
||||
'https://docs.ansible.com/ansible-tower/latest/html/userguide/index.html';
|
||||
|
||||
class PageHeaderToolbar extends Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
this.state = {
|
||||
isHelpOpen: false,
|
||||
isUserOpen: false,
|
||||
};
|
||||
function PageHeaderToolbar({
|
||||
isAboutDisabled,
|
||||
onAboutClick,
|
||||
onLogoutClick,
|
||||
loggedInUser,
|
||||
i18n,
|
||||
}) {
|
||||
const [isHelpOpen, setIsHelpOpen] = useState(false);
|
||||
const [isUserOpen, setIsUserOpen] = useState(false);
|
||||
|
||||
this.handleHelpSelect = this.handleHelpSelect.bind(this);
|
||||
this.handleHelpToggle = this.handleHelpToggle.bind(this);
|
||||
this.handleUserSelect = this.handleUserSelect.bind(this);
|
||||
this.handleUserToggle = this.handleUserToggle.bind(this);
|
||||
}
|
||||
const handleHelpSelect = () => {
|
||||
setIsHelpOpen(!isHelpOpen);
|
||||
};
|
||||
|
||||
handleHelpSelect() {
|
||||
const { isHelpOpen } = this.state;
|
||||
const handleUserSelect = () => {
|
||||
setIsUserOpen(!isUserOpen);
|
||||
};
|
||||
|
||||
this.setState({ isHelpOpen: !isHelpOpen });
|
||||
}
|
||||
|
||||
handleUserSelect() {
|
||||
const { isUserOpen } = this.state;
|
||||
|
||||
this.setState({ isUserOpen: !isUserOpen });
|
||||
}
|
||||
|
||||
handleHelpToggle(isOpen) {
|
||||
this.setState({ isHelpOpen: isOpen });
|
||||
}
|
||||
|
||||
handleUserToggle(isOpen) {
|
||||
this.setState({ isUserOpen: isOpen });
|
||||
}
|
||||
|
||||
render() {
|
||||
const { isHelpOpen, isUserOpen } = this.state;
|
||||
const {
|
||||
isAboutDisabled,
|
||||
onAboutClick,
|
||||
onLogoutClick,
|
||||
loggedInUser,
|
||||
i18n,
|
||||
} = this.props;
|
||||
|
||||
return (
|
||||
<PageHeaderTools>
|
||||
<PageHeaderToolsGroup>
|
||||
<Tooltip position="left" content={<div>{i18n._(t`Info`)}</div>}>
|
||||
<PageHeaderToolsItem>
|
||||
<Dropdown
|
||||
isPlain
|
||||
isOpen={isHelpOpen}
|
||||
position={DropdownPosition.right}
|
||||
onSelect={this.handleHelpSelect}
|
||||
toggle={
|
||||
<DropdownToggle
|
||||
onToggle={this.handleHelpToggle}
|
||||
aria-label={i18n._(t`Info`)}
|
||||
>
|
||||
<QuestionCircleIcon />
|
||||
</DropdownToggle>
|
||||
}
|
||||
dropdownItems={[
|
||||
<DropdownItem key="help" target="_blank" href={DOCLINK}>
|
||||
{i18n._(t`Help`)}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="about"
|
||||
component="button"
|
||||
isDisabled={isAboutDisabled}
|
||||
onClick={onAboutClick}
|
||||
>
|
||||
{i18n._(t`About`)}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
</PageHeaderToolsItem>
|
||||
</Tooltip>
|
||||
<Tooltip position="left" content={<div>{i18n._(t`User`)}</div>}>
|
||||
<PageHeaderToolsItem>
|
||||
<Dropdown
|
||||
id="toolbar-user-dropdown"
|
||||
isPlain
|
||||
isOpen={isUserOpen}
|
||||
position={DropdownPosition.right}
|
||||
onSelect={this.handleUserSelect}
|
||||
toggle={
|
||||
<DropdownToggle onToggle={this.handleUserToggle}>
|
||||
<UserIcon />
|
||||
{loggedInUser && (
|
||||
<span style={{ marginLeft: '10px' }}>
|
||||
{loggedInUser.username}
|
||||
</span>
|
||||
)}
|
||||
</DropdownToggle>
|
||||
}
|
||||
dropdownItems={[
|
||||
<DropdownItem
|
||||
key="user"
|
||||
href={
|
||||
loggedInUser
|
||||
? `/users/${loggedInUser.id}/details`
|
||||
: '/home'
|
||||
}
|
||||
>
|
||||
{i18n._(t`User Details`)}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="logout"
|
||||
component="button"
|
||||
onClick={onLogoutClick}
|
||||
id="logout-button"
|
||||
>
|
||||
{i18n._(t`Logout`)}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
</PageHeaderToolsItem>
|
||||
</Tooltip>
|
||||
</PageHeaderToolsGroup>
|
||||
</PageHeaderTools>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<PageHeaderTools>
|
||||
<PageHeaderToolsGroup>
|
||||
<Tooltip position="left" content={<div>{i18n._(t`Info`)}</div>}>
|
||||
<PageHeaderToolsItem>
|
||||
<Dropdown
|
||||
isPlain
|
||||
isOpen={isHelpOpen}
|
||||
position={DropdownPosition.right}
|
||||
onSelect={handleHelpSelect}
|
||||
toggle={
|
||||
<DropdownToggle
|
||||
onToggle={setIsHelpOpen}
|
||||
aria-label={i18n._(t`Info`)}
|
||||
>
|
||||
<QuestionCircleIcon />
|
||||
</DropdownToggle>
|
||||
}
|
||||
dropdownItems={[
|
||||
<DropdownItem key="help" target="_blank" href={DOCLINK}>
|
||||
{i18n._(t`Help`)}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="about"
|
||||
component="button"
|
||||
isDisabled={isAboutDisabled}
|
||||
onClick={onAboutClick}
|
||||
>
|
||||
{i18n._(t`About`)}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
</PageHeaderToolsItem>
|
||||
</Tooltip>
|
||||
<Tooltip position="left" content={<div>{i18n._(t`User`)}</div>}>
|
||||
<PageHeaderToolsItem>
|
||||
<Dropdown
|
||||
id="toolbar-user-dropdown"
|
||||
isPlain
|
||||
isOpen={isUserOpen}
|
||||
position={DropdownPosition.right}
|
||||
onSelect={handleUserSelect}
|
||||
toggle={
|
||||
<DropdownToggle onToggle={setIsUserOpen}>
|
||||
<UserIcon />
|
||||
{loggedInUser && (
|
||||
<span style={{ marginLeft: '10px' }}>
|
||||
{loggedInUser.username}
|
||||
</span>
|
||||
)}
|
||||
</DropdownToggle>
|
||||
}
|
||||
dropdownItems={[
|
||||
<DropdownItem
|
||||
key="user"
|
||||
href={
|
||||
loggedInUser ? `/users/${loggedInUser.id}/details` : '/home'
|
||||
}
|
||||
>
|
||||
{i18n._(t`User Details`)}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="logout"
|
||||
component="button"
|
||||
onClick={onLogoutClick}
|
||||
id="logout-button"
|
||||
>
|
||||
{i18n._(t`Logout`)}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
</PageHeaderToolsItem>
|
||||
</Tooltip>
|
||||
</PageHeaderToolsGroup>
|
||||
</PageHeaderTools>
|
||||
);
|
||||
}
|
||||
|
||||
PageHeaderToolbar.propTypes = {
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
import React, { Fragment } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import {
|
||||
PageSection as PFPageSection,
|
||||
PageSectionVariants,
|
||||
Breadcrumb,
|
||||
BreadcrumbItem,
|
||||
BreadcrumbHeading,
|
||||
} from '@patternfly/react-core';
|
||||
import { Link, Route, useRouteMatch } from 'react-router-dom';
|
||||
|
||||
import styled from 'styled-components';
|
||||
|
||||
const PageSection = styled(PFPageSection)`
|
||||
padding-top: 10px;
|
||||
padding-bottom: 10px;
|
||||
`;
|
||||
|
||||
const Breadcrumbs = ({ breadcrumbConfig }) => {
|
||||
const { light } = PageSectionVariants;
|
||||
|
||||
return (
|
||||
<PageSection variant={light}>
|
||||
<Breadcrumb>
|
||||
<Route path="/:path">
|
||||
<Crumb breadcrumbConfig={breadcrumbConfig} />
|
||||
</Route>
|
||||
</Breadcrumb>
|
||||
</PageSection>
|
||||
);
|
||||
};
|
||||
|
||||
const Crumb = ({ breadcrumbConfig, showDivider }) => {
|
||||
const match = useRouteMatch();
|
||||
const crumb = breadcrumbConfig[match.url];
|
||||
|
||||
let crumbElement = (
|
||||
<BreadcrumbItem key={match.url} showDivider={showDivider}>
|
||||
<Link to={match.url}>{crumb}</Link>
|
||||
</BreadcrumbItem>
|
||||
);
|
||||
|
||||
if (match.isExact) {
|
||||
crumbElement = (
|
||||
<BreadcrumbHeading key="breadcrumb-heading" showDivider={showDivider}>
|
||||
{crumb}
|
||||
</BreadcrumbHeading>
|
||||
);
|
||||
}
|
||||
|
||||
if (!crumb) {
|
||||
crumbElement = null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
{crumbElement}
|
||||
<Route path={`${match.url}/:path`}>
|
||||
<Crumb breadcrumbConfig={breadcrumbConfig} showDivider />
|
||||
</Route>
|
||||
</Fragment>
|
||||
);
|
||||
};
|
||||
|
||||
Breadcrumbs.propTypes = {
|
||||
breadcrumbConfig: PropTypes.objectOf(PropTypes.string).isRequired,
|
||||
};
|
||||
|
||||
Crumb.propTypes = {
|
||||
breadcrumbConfig: PropTypes.objectOf(PropTypes.string).isRequired,
|
||||
};
|
||||
|
||||
export default Breadcrumbs;
|
||||
@@ -1 +0,0 @@
|
||||
export { default } from './Breadcrumbs';
|
||||
@@ -6,6 +6,7 @@ import 'codemirror/mode/javascript/javascript';
|
||||
import 'codemirror/mode/yaml/yaml';
|
||||
import 'codemirror/mode/jinja2/jinja2';
|
||||
import 'codemirror/lib/codemirror.css';
|
||||
import 'codemirror/addon/display/placeholder';
|
||||
|
||||
const LINE_HEIGHT = 24;
|
||||
const PADDING = 12;
|
||||
@@ -55,6 +56,17 @@ const CodeMirror = styled(ReactCodeMirror)`
|
||||
background-color: var(--pf-c-form-control--disabled--BackgroundColor);
|
||||
}
|
||||
`}
|
||||
${props =>
|
||||
props.options &&
|
||||
props.options.placeholder &&
|
||||
`
|
||||
.CodeMirror-empty {
|
||||
pre.CodeMirror-placeholder {
|
||||
color: var(--pf-c-form-control--placeholder--Color);
|
||||
height: 100% !important;
|
||||
}
|
||||
}
|
||||
`}
|
||||
`;
|
||||
|
||||
function CodeMirrorInput({
|
||||
@@ -66,6 +78,7 @@ function CodeMirrorInput({
|
||||
rows,
|
||||
fullHeight,
|
||||
className,
|
||||
placeholder,
|
||||
}) {
|
||||
// Workaround for CodeMirror bug: If CodeMirror renders in a modal on the
|
||||
// modal's initial render, it appears as an empty box due to mis-calculated
|
||||
@@ -92,6 +105,7 @@ function CodeMirrorInput({
|
||||
smartIndent: false,
|
||||
lineNumbers: true,
|
||||
lineWrapping: true,
|
||||
placeholder,
|
||||
readOnly,
|
||||
}}
|
||||
fullHeight={fullHeight}
|
||||
|
||||
@@ -1,22 +1,25 @@
|
||||
import React from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import styled from 'styled-components';
|
||||
import {
|
||||
EmptyState as PFEmptyState,
|
||||
EmptyStateBody,
|
||||
EmptyStateIcon,
|
||||
Spinner,
|
||||
} from '@patternfly/react-core';
|
||||
|
||||
const EmptyState = styled(PFEmptyState)`
|
||||
--pf-c-empty-state--m-lg--MaxWidth: none;
|
||||
min-height: 250px;
|
||||
`;
|
||||
|
||||
// TODO: Better loading state - skeleton lines / spinner, etc.
|
||||
const ContentLoading = ({ className, i18n }) => (
|
||||
<EmptyState variant="full" className={className}>
|
||||
<EmptyStateBody>{i18n._(t`Loading...`)}</EmptyStateBody>
|
||||
</EmptyState>
|
||||
);
|
||||
const ContentLoading = ({ className }) => {
|
||||
return (
|
||||
<EmptyState variant="full" className={className}>
|
||||
<EmptyStateIcon variant="container" component={Spinner} />
|
||||
</EmptyState>
|
||||
);
|
||||
};
|
||||
|
||||
export { ContentLoading as _ContentLoading };
|
||||
export default withI18n()(ContentLoading);
|
||||
export default ContentLoading;
|
||||
|
||||
@@ -34,7 +34,7 @@ function CopyButton({
|
||||
<>
|
||||
<Tooltip content={helperText.tooltip} position="top">
|
||||
<Button
|
||||
isDisabled={isDisabled}
|
||||
isDisabled={isLoading || isDisabled}
|
||||
aria-label={i18n._(t`Copy`)}
|
||||
variant="plain"
|
||||
onClick={copyItemToAPI}
|
||||
|
||||
@@ -16,10 +16,17 @@ function CredentialChip({ credential, i18n, i18nHash, ...props }) {
|
||||
type = toTitleCase(credential.kind);
|
||||
}
|
||||
|
||||
const buildCredentialName = () => {
|
||||
if (credential.kind === 'vault' && credential.inputs?.vault_id) {
|
||||
return `${credential.name} | ${credential.inputs.vault_id}`;
|
||||
}
|
||||
return `${credential.name}`;
|
||||
};
|
||||
|
||||
return (
|
||||
<Chip {...props}>
|
||||
<strong>{type}: </strong>
|
||||
{credential.name}
|
||||
{buildCredentialName()}
|
||||
</Chip>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -93,9 +93,11 @@ function DataListToolbar({
|
||||
onRemove={onRemove}
|
||||
/>
|
||||
</ToolbarItem>
|
||||
<ToolbarItem>
|
||||
<Sort qsConfig={qsConfig} columns={sortColumns} onSort={onSort} />
|
||||
</ToolbarItem>
|
||||
{sortColumns && (
|
||||
<ToolbarItem>
|
||||
<Sort qsConfig={qsConfig} columns={sortColumns} onSort={onSort} />
|
||||
</ToolbarItem>
|
||||
)}
|
||||
</ToolbarToggleGroup>
|
||||
{showExpandCollapse && (
|
||||
<ToolbarGroup>
|
||||
@@ -157,7 +159,7 @@ DataListToolbar.propTypes = {
|
||||
searchColumns: SearchColumns.isRequired,
|
||||
searchableKeys: PropTypes.arrayOf(PropTypes.string),
|
||||
relatedSearchableKeys: PropTypes.arrayOf(PropTypes.string),
|
||||
sortColumns: SortColumns.isRequired,
|
||||
sortColumns: SortColumns,
|
||||
showSelectAll: PropTypes.bool,
|
||||
isAllSelected: PropTypes.bool,
|
||||
isCompact: PropTypes.bool,
|
||||
@@ -174,6 +176,7 @@ DataListToolbar.defaultProps = {
|
||||
itemCount: 0,
|
||||
searchableKeys: [],
|
||||
relatedSearchableKeys: [],
|
||||
sortColumns: null,
|
||||
clearAllFilters: null,
|
||||
showSelectAll: false,
|
||||
isAllSelected: false,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { useState, useEffect, useContext } from 'react';
|
||||
import { arrayOf, func, object, string } from 'prop-types';
|
||||
import { arrayOf, func, shape, string, oneOfType, number } from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Button, Tooltip, DropdownItem } from '@patternfly/react-core';
|
||||
@@ -149,7 +149,20 @@ DisassociateButton.defaultProps = {
|
||||
};
|
||||
|
||||
DisassociateButton.propTypes = {
|
||||
itemsToDisassociate: arrayOf(object),
|
||||
itemsToDisassociate: oneOfType([
|
||||
arrayOf(
|
||||
shape({
|
||||
id: number.isRequired,
|
||||
name: string.isRequired,
|
||||
})
|
||||
),
|
||||
arrayOf(
|
||||
shape({
|
||||
id: number.isRequired,
|
||||
hostname: string.isRequired,
|
||||
})
|
||||
),
|
||||
]),
|
||||
modalNote: string,
|
||||
modalTitle: string,
|
||||
onDisassociate: func.isRequired,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { Component, Fragment } from 'react';
|
||||
import React, { useState, Fragment } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import styled from 'styled-components';
|
||||
import { withI18n } from '@lingui/react';
|
||||
@@ -32,27 +32,15 @@ const Expandable = styled(PFExpandable)`
|
||||
}
|
||||
`;
|
||||
|
||||
class ErrorDetail extends Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
function ErrorDetail({ error, i18n }) {
|
||||
const { response } = error;
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
|
||||
this.state = {
|
||||
isExpanded: false,
|
||||
};
|
||||
const handleToggle = () => {
|
||||
setIsExpanded(!isExpanded);
|
||||
};
|
||||
|
||||
this.handleToggle = this.handleToggle.bind(this);
|
||||
this.renderNetworkError = this.renderNetworkError.bind(this);
|
||||
this.renderStack = this.renderStack.bind(this);
|
||||
}
|
||||
|
||||
handleToggle() {
|
||||
const { isExpanded } = this.state;
|
||||
this.setState({ isExpanded: !isExpanded });
|
||||
}
|
||||
|
||||
renderNetworkError() {
|
||||
const { error } = this.props;
|
||||
const { response } = error;
|
||||
const renderNetworkError = () => {
|
||||
const message = getErrorMessage(response);
|
||||
|
||||
return (
|
||||
@@ -74,31 +62,25 @@ class ErrorDetail extends Component {
|
||||
</CardBody>
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
renderStack() {
|
||||
const { error } = this.props;
|
||||
const renderStack = () => {
|
||||
return <CardBody>{error.stack}</CardBody>;
|
||||
}
|
||||
};
|
||||
|
||||
render() {
|
||||
const { isExpanded } = this.state;
|
||||
const { error, i18n } = this.props;
|
||||
|
||||
return (
|
||||
<Expandable
|
||||
toggleText={i18n._(t`Details`)}
|
||||
onToggle={this.handleToggle}
|
||||
isExpanded={isExpanded}
|
||||
>
|
||||
<Card>
|
||||
{Object.prototype.hasOwnProperty.call(error, 'response')
|
||||
? this.renderNetworkError()
|
||||
: this.renderStack()}
|
||||
</Card>
|
||||
</Expandable>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<Expandable
|
||||
toggleText={i18n._(t`Details`)}
|
||||
onToggle={handleToggle}
|
||||
isExpanded={isExpanded}
|
||||
>
|
||||
<Card>
|
||||
{Object.prototype.hasOwnProperty.call(error, 'response')
|
||||
? renderNetworkError()
|
||||
: renderStack()}
|
||||
</Card>
|
||||
</Expandable>
|
||||
);
|
||||
}
|
||||
|
||||
ErrorDetail.propTypes = {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
|
||||
import ErrorDetail from './ErrorDetail';
|
||||
@@ -39,7 +40,7 @@ describe('ErrorDetail', () => {
|
||||
}
|
||||
/>
|
||||
);
|
||||
wrapper.find('ExpandableSection').prop('onToggle')();
|
||||
act(() => wrapper.find('ExpandableSection').prop('onToggle')());
|
||||
wrapper.update();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -31,35 +31,31 @@ const ToolbarItem = styled(PFToolbarItem)`
|
||||
|
||||
// TODO: Recommend renaming this component to avoid confusion
|
||||
// with ExpandingContainer
|
||||
class ExpandCollapse extends React.Component {
|
||||
render() {
|
||||
const { isCompact, onCompact, onExpand, i18n } = this.props;
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
<ToolbarItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Collapse`)}
|
||||
onClick={onCompact}
|
||||
isActive={isCompact}
|
||||
>
|
||||
<BarsIcon />
|
||||
</Button>
|
||||
</ToolbarItem>
|
||||
<ToolbarItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand`)}
|
||||
onClick={onExpand}
|
||||
isActive={!isCompact}
|
||||
>
|
||||
<EqualsIcon />
|
||||
</Button>
|
||||
</ToolbarItem>
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
function ExpandCollapse({ isCompact, onCompact, onExpand, i18n }) {
|
||||
return (
|
||||
<Fragment>
|
||||
<ToolbarItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Collapse`)}
|
||||
onClick={onCompact}
|
||||
isActive={isCompact}
|
||||
>
|
||||
<BarsIcon />
|
||||
</Button>
|
||||
</ToolbarItem>
|
||||
<ToolbarItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand`)}
|
||||
onClick={onExpand}
|
||||
isActive={!isCompact}
|
||||
>
|
||||
<EqualsIcon />
|
||||
</Button>
|
||||
</ToolbarItem>
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
ExpandCollapse.propTypes = {
|
||||
|
||||
@@ -12,7 +12,15 @@ import {
|
||||
import { EyeIcon, EyeSlashIcon } from '@patternfly/react-icons';
|
||||
|
||||
function PasswordInput(props) {
|
||||
const { id, name, validate, isRequired, isDisabled, i18n } = props;
|
||||
const {
|
||||
autocomplete,
|
||||
id,
|
||||
name,
|
||||
validate,
|
||||
isRequired,
|
||||
isDisabled,
|
||||
i18n,
|
||||
} = props;
|
||||
const [inputType, setInputType] = useState('password');
|
||||
const [field, meta] = useField({ name, validate });
|
||||
|
||||
@@ -38,6 +46,7 @@ function PasswordInput(props) {
|
||||
</Button>
|
||||
</Tooltip>
|
||||
<TextInput
|
||||
autoComplete={autocomplete}
|
||||
id={id}
|
||||
placeholder={field.value === '$encrypted$' ? 'ENCRYPTED' : undefined}
|
||||
{...field}
|
||||
@@ -55,6 +64,7 @@ function PasswordInput(props) {
|
||||
}
|
||||
|
||||
PasswordInput.propTypes = {
|
||||
autocomplete: PropTypes.string,
|
||||
id: PropTypes.string.isRequired,
|
||||
name: PropTypes.string.isRequired,
|
||||
validate: PropTypes.func,
|
||||
@@ -63,6 +73,7 @@ PasswordInput.propTypes = {
|
||||
};
|
||||
|
||||
PasswordInput.defaultProps = {
|
||||
autocomplete: 'new-password',
|
||||
validate: () => {},
|
||||
isRequired: false,
|
||||
isDisabled: false,
|
||||
|
||||
@@ -7,8 +7,15 @@ import { KebabifiedContext } from '../../contexts/Kebabified';
|
||||
import AlertModal from '../AlertModal';
|
||||
import { Job } from '../../types';
|
||||
|
||||
function cannotCancel(job) {
|
||||
return !job.summary_fields.user_capabilities.start;
|
||||
function cannotCancelBecausePermissions(job) {
|
||||
return (
|
||||
!job.summary_fields.user_capabilities.start &&
|
||||
['pending', 'waiting', 'running'].includes(job.status)
|
||||
);
|
||||
}
|
||||
|
||||
function cannotCancelBecauseNotRunning(job) {
|
||||
return !['pending', 'waiting', 'running'].includes(job.status);
|
||||
}
|
||||
|
||||
function JobListCancelButton({ i18n, jobsToCancel, onCancel }) {
|
||||
@@ -33,20 +40,40 @@ function JobListCancelButton({ i18n, jobsToCancel, onCancel }) {
|
||||
}, [isKebabified, isModalOpen, onKebabModalChange]);
|
||||
|
||||
const renderTooltip = () => {
|
||||
const jobsUnableToCancel = jobsToCancel
|
||||
.filter(cannotCancel)
|
||||
const cannotCancelPermissions = jobsToCancel
|
||||
.filter(cannotCancelBecausePermissions)
|
||||
.map(job => job.name);
|
||||
const numJobsUnableToCancel = jobsUnableToCancel.length;
|
||||
const cannotCancelNotRunning = jobsToCancel
|
||||
.filter(cannotCancelBecauseNotRunning)
|
||||
.map(job => job.name);
|
||||
const numJobsUnableToCancel = cannotCancelPermissions.concat(
|
||||
cannotCancelNotRunning
|
||||
).length;
|
||||
if (numJobsUnableToCancel > 0) {
|
||||
return (
|
||||
<div>
|
||||
{i18n._(
|
||||
'{numJobsUnableToCancel, plural, one {You do not have permission to cancel the following job:} other {You do not have permission to cancel the following jobs:}}',
|
||||
{
|
||||
numJobsUnableToCancel,
|
||||
}
|
||||
{cannotCancelPermissions.length > 0 && (
|
||||
<div>
|
||||
{i18n._(
|
||||
'{numJobsUnableToCancel, plural, one {You do not have permission to cancel the following job:} other {You do not have permission to cancel the following jobs:}}',
|
||||
{
|
||||
numJobsUnableToCancel: cannotCancelPermissions.length,
|
||||
}
|
||||
)}
|
||||
{' '.concat(cannotCancelPermissions.join(', '))}
|
||||
</div>
|
||||
)}
|
||||
{cannotCancelNotRunning.length > 0 && (
|
||||
<div>
|
||||
{i18n._(
|
||||
'{numJobsUnableToCancel, plural, one {You cannot cancel the following job because it is not running:} other {You cannot cancel the following jobs because they are not running:}}',
|
||||
{
|
||||
numJobsUnableToCancel: cannotCancelNotRunning.length,
|
||||
}
|
||||
)}
|
||||
{' '.concat(cannotCancelNotRunning.join(', '))}
|
||||
</div>
|
||||
)}
|
||||
{' '.concat(jobsUnableToCancel.join(', '))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -62,7 +89,9 @@ function JobListCancelButton({ i18n, jobsToCancel, onCancel }) {
|
||||
};
|
||||
|
||||
const isDisabled =
|
||||
jobsToCancel.length === 0 || jobsToCancel.some(cannotCancel);
|
||||
jobsToCancel.length === 0 ||
|
||||
jobsToCancel.some(cannotCancelBecausePermissions) ||
|
||||
jobsToCancel.some(cannotCancelBecauseNotRunning);
|
||||
|
||||
const cancelJobText = i18n._(
|
||||
'{zeroOrOneJobSelected, plural, one {Cancel job} other {Cancel jobs}}',
|
||||
|
||||
@@ -30,6 +30,29 @@ describe('<JobListCancelButton />', () => {
|
||||
start: false,
|
||||
},
|
||||
},
|
||||
status: 'running',
|
||||
},
|
||||
]}
|
||||
/>
|
||||
);
|
||||
expect(wrapper.find('JobListCancelButton button').props().disabled).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
test('should be disabled when selected job is not running', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobListCancelButton
|
||||
jobsToCancel={[
|
||||
{
|
||||
id: 1,
|
||||
name: 'some job',
|
||||
summary_fields: {
|
||||
user_capabilities: {
|
||||
delete: false,
|
||||
start: false,
|
||||
},
|
||||
},
|
||||
status: 'successful',
|
||||
},
|
||||
]}
|
||||
/>
|
||||
@@ -51,6 +74,7 @@ describe('<JobListCancelButton />', () => {
|
||||
start: true,
|
||||
},
|
||||
},
|
||||
status: 'running',
|
||||
},
|
||||
]}
|
||||
/>
|
||||
@@ -73,6 +97,7 @@ describe('<JobListCancelButton />', () => {
|
||||
start: true,
|
||||
},
|
||||
},
|
||||
status: 'running',
|
||||
},
|
||||
]}
|
||||
onCancel={onCancel}
|
||||
|
||||
@@ -39,7 +39,7 @@ function JobListItem({
|
||||
project_update: i18n._(t`Source Control Update`),
|
||||
inventory_update: i18n._(t`Inventory Sync`),
|
||||
job: i18n._(t`Playbook Run`),
|
||||
command: i18n._(t`Command`),
|
||||
ad_hoc_command: i18n._(t`Command`),
|
||||
management_job: i18n._(t`Management Job`),
|
||||
workflow_job: i18n._(t`Workflow Job`),
|
||||
};
|
||||
|
||||
@@ -44,7 +44,7 @@ describe('useWsJobs hook', () => {
|
||||
|
||||
test('should establish websocket connection', async () => {
|
||||
global.document.cookie = 'csrftoken=abc123';
|
||||
const mockServer = new WS('wss://localhost/websocket/');
|
||||
const mockServer = new WS('ws://localhost/websocket/');
|
||||
|
||||
const jobs = [{ id: 1 }];
|
||||
await act(async () => {
|
||||
@@ -67,7 +67,7 @@ describe('useWsJobs hook', () => {
|
||||
|
||||
test('should update job status', async () => {
|
||||
global.document.cookie = 'csrftoken=abc123';
|
||||
const mockServer = new WS('wss://localhost/websocket/');
|
||||
const mockServer = new WS('ws://localhost/websocket/');
|
||||
|
||||
const jobs = [{ id: 1, status: 'running' }];
|
||||
await act(async () => {
|
||||
@@ -105,7 +105,7 @@ describe('useWsJobs hook', () => {
|
||||
|
||||
test('should fetch new job', async () => {
|
||||
global.document.cookie = 'csrftoken=abc123';
|
||||
const mockServer = new WS('wss://localhost/websocket/');
|
||||
const mockServer = new WS('ws://localhost/websocket/');
|
||||
const jobs = [{ id: 1 }];
|
||||
const fetch = jest.fn(() => []);
|
||||
await act(async () => {
|
||||
|
||||
@@ -25,6 +25,8 @@ function canLaunchWithoutPrompt(launchData) {
|
||||
!launchData.ask_limit_on_launch &&
|
||||
!launchData.ask_scm_branch_on_launch &&
|
||||
!launchData.survey_enabled &&
|
||||
(!launchData.passwords_needed_to_start ||
|
||||
launchData.passwords_needed_to_start.length === 0) &&
|
||||
(!launchData.variables_needed_to_start ||
|
||||
launchData.variables_needed_to_start.length === 0)
|
||||
);
|
||||
@@ -44,6 +46,7 @@ class LaunchButton extends React.Component {
|
||||
showLaunchPrompt: false,
|
||||
launchConfig: null,
|
||||
launchError: false,
|
||||
surveyConfig: null,
|
||||
};
|
||||
|
||||
this.handleLaunch = this.handleLaunch.bind(this);
|
||||
@@ -67,15 +70,28 @@ class LaunchButton extends React.Component {
|
||||
resource.type === 'workflow_job_template'
|
||||
? WorkflowJobTemplatesAPI.readLaunch(resource.id)
|
||||
: JobTemplatesAPI.readLaunch(resource.id);
|
||||
const readSurvey =
|
||||
resource.type === 'workflow_job_template'
|
||||
? WorkflowJobTemplatesAPI.readSurvey(resource.id)
|
||||
: JobTemplatesAPI.readSurvey(resource.id);
|
||||
try {
|
||||
const { data: launchConfig } = await readLaunch;
|
||||
|
||||
let surveyConfig = null;
|
||||
|
||||
if (launchConfig.survey_enabled) {
|
||||
const { data } = await readSurvey;
|
||||
|
||||
surveyConfig = data;
|
||||
}
|
||||
|
||||
if (canLaunchWithoutPrompt(launchConfig)) {
|
||||
this.launchWithParams({});
|
||||
} else {
|
||||
this.setState({
|
||||
showLaunchPrompt: true,
|
||||
launchConfig,
|
||||
surveyConfig,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -86,17 +102,20 @@ class LaunchButton extends React.Component {
|
||||
async launchWithParams(params) {
|
||||
try {
|
||||
const { history, resource } = this.props;
|
||||
const jobPromise =
|
||||
resource.type === 'workflow_job_template'
|
||||
? WorkflowJobTemplatesAPI.launch(resource.id, params || {})
|
||||
: JobTemplatesAPI.launch(resource.id, params || {});
|
||||
let jobPromise;
|
||||
|
||||
if (resource.type === 'job_template') {
|
||||
jobPromise = JobTemplatesAPI.launch(resource.id, params || {});
|
||||
} else if (resource.type === 'workflow_job_template') {
|
||||
jobPromise = WorkflowJobTemplatesAPI.launch(resource.id, params || {});
|
||||
} else if (resource.type === 'job') {
|
||||
jobPromise = JobsAPI.relaunch(resource.id, params || {});
|
||||
} else if (resource.type === 'workflow_job') {
|
||||
jobPromise = WorkflowJobsAPI.relaunch(resource.id, params || {});
|
||||
}
|
||||
|
||||
const { data: job } = await jobPromise;
|
||||
history.push(
|
||||
`/${
|
||||
resource.type === 'workflow_job_template' ? 'jobs/workflow' : 'jobs'
|
||||
}/${job.id}/output`
|
||||
);
|
||||
history.push(`/jobs/${job.id}/output`);
|
||||
} catch (launchError) {
|
||||
this.setState({ launchError });
|
||||
}
|
||||
@@ -113,20 +132,15 @@ class LaunchButton extends React.Component {
|
||||
readRelaunch = InventorySourcesAPI.readLaunchUpdate(
|
||||
resource.inventory_source
|
||||
);
|
||||
relaunch = InventorySourcesAPI.launchUpdate(resource.inventory_source);
|
||||
} else if (resource.type === 'project_update') {
|
||||
// We'll need to handle the scenario where the project no longer exists
|
||||
readRelaunch = ProjectsAPI.readLaunchUpdate(resource.project);
|
||||
relaunch = ProjectsAPI.launchUpdate(resource.project);
|
||||
} else if (resource.type === 'workflow_job') {
|
||||
readRelaunch = WorkflowJobsAPI.readRelaunch(resource.id);
|
||||
relaunch = WorkflowJobsAPI.relaunch(resource.id);
|
||||
} else if (resource.type === 'ad_hoc_command') {
|
||||
readRelaunch = AdHocCommandsAPI.readRelaunch(resource.id);
|
||||
relaunch = AdHocCommandsAPI.relaunch(resource.id);
|
||||
} else if (resource.type === 'job') {
|
||||
readRelaunch = JobsAPI.readRelaunch(resource.id);
|
||||
relaunch = JobsAPI.relaunch(resource.id);
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -135,11 +149,22 @@ class LaunchButton extends React.Component {
|
||||
!relaunchConfig.passwords_needed_to_start ||
|
||||
relaunchConfig.passwords_needed_to_start.length === 0
|
||||
) {
|
||||
if (resource.type === 'inventory_update') {
|
||||
relaunch = InventorySourcesAPI.launchUpdate(
|
||||
resource.inventory_source
|
||||
);
|
||||
} else if (resource.type === 'project_update') {
|
||||
relaunch = ProjectsAPI.launchUpdate(resource.project);
|
||||
} else if (resource.type === 'workflow_job') {
|
||||
relaunch = WorkflowJobsAPI.relaunch(resource.id);
|
||||
} else if (resource.type === 'ad_hoc_command') {
|
||||
relaunch = AdHocCommandsAPI.relaunch(resource.id);
|
||||
} else if (resource.type === 'job') {
|
||||
relaunch = JobsAPI.relaunch(resource.id);
|
||||
}
|
||||
const { data: job } = await relaunch;
|
||||
history.push(`/jobs/${job.id}/output`);
|
||||
} else {
|
||||
// TODO: restructure (async?) to send launch command after prompts
|
||||
// TODO: does relaunch need different prompt treatment than launch?
|
||||
this.setState({
|
||||
showLaunchPrompt: true,
|
||||
launchConfig: relaunchConfig,
|
||||
@@ -151,7 +176,12 @@ class LaunchButton extends React.Component {
|
||||
}
|
||||
|
||||
render() {
|
||||
const { launchError, showLaunchPrompt, launchConfig } = this.state;
|
||||
const {
|
||||
launchError,
|
||||
showLaunchPrompt,
|
||||
launchConfig,
|
||||
surveyConfig,
|
||||
} = this.state;
|
||||
const { resource, i18n, children } = this.props;
|
||||
return (
|
||||
<Fragment>
|
||||
@@ -172,7 +202,8 @@ class LaunchButton extends React.Component {
|
||||
)}
|
||||
{showLaunchPrompt && (
|
||||
<LaunchPrompt
|
||||
config={launchConfig}
|
||||
launchConfig={launchConfig}
|
||||
surveyConfig={surveyConfig}
|
||||
resource={resource}
|
||||
onLaunch={this.launchWithParams}
|
||||
onCancel={() => this.setState({ showLaunchPrompt: false })}
|
||||
|
||||
@@ -4,10 +4,16 @@ import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
import { sleep } from '../../../testUtils/testUtils';
|
||||
|
||||
import LaunchButton from './LaunchButton';
|
||||
import { JobTemplatesAPI, WorkflowJobTemplatesAPI } from '../../api';
|
||||
import {
|
||||
InventorySourcesAPI,
|
||||
JobsAPI,
|
||||
JobTemplatesAPI,
|
||||
ProjectsAPI,
|
||||
WorkflowJobsAPI,
|
||||
WorkflowJobTemplatesAPI,
|
||||
} from '../../api';
|
||||
|
||||
jest.mock('../../api/models/WorkflowJobTemplates');
|
||||
jest.mock('../../api/models/JobTemplates');
|
||||
jest.mock('../../api');
|
||||
|
||||
describe('LaunchButton', () => {
|
||||
JobTemplatesAPI.readLaunch.mockResolvedValue({
|
||||
@@ -22,10 +28,14 @@ describe('LaunchButton', () => {
|
||||
},
|
||||
});
|
||||
|
||||
const children = ({ handleLaunch }) => (
|
||||
const launchButton = ({ handleLaunch }) => (
|
||||
<button type="submit" onClick={() => handleLaunch()} />
|
||||
);
|
||||
|
||||
const relaunchButton = ({ handleRelaunch }) => (
|
||||
<button type="submit" onClick={() => handleRelaunch()} />
|
||||
);
|
||||
|
||||
const resource = {
|
||||
id: 1,
|
||||
type: 'job_template',
|
||||
@@ -35,7 +45,7 @@ describe('LaunchButton', () => {
|
||||
|
||||
test('renders the expected content', () => {
|
||||
const wrapper = mountWithContexts(
|
||||
<LaunchButton resource={resource}>{children}</LaunchButton>
|
||||
<LaunchButton resource={resource}>{launchButton}</LaunchButton>
|
||||
);
|
||||
expect(wrapper).toHaveLength(1);
|
||||
});
|
||||
@@ -51,7 +61,7 @@ describe('LaunchButton', () => {
|
||||
},
|
||||
});
|
||||
const wrapper = mountWithContexts(
|
||||
<LaunchButton resource={resource}>{children}</LaunchButton>,
|
||||
<LaunchButton resource={resource}>{launchButton}</LaunchButton>,
|
||||
{
|
||||
context: {
|
||||
router: { history },
|
||||
@@ -87,7 +97,7 @@ describe('LaunchButton', () => {
|
||||
type: 'workflow_job_template',
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
{launchButton}
|
||||
</LaunchButton>,
|
||||
{
|
||||
context: {
|
||||
@@ -100,12 +110,162 @@ describe('LaunchButton', () => {
|
||||
expect(WorkflowJobTemplatesAPI.readLaunch).toHaveBeenCalledWith(1);
|
||||
await sleep(0);
|
||||
expect(WorkflowJobTemplatesAPI.launch).toHaveBeenCalledWith(1, {});
|
||||
expect(history.location.pathname).toEqual('/jobs/workflow/9000/output');
|
||||
expect(history.location.pathname).toEqual('/jobs/9000/output');
|
||||
});
|
||||
|
||||
test('should relaunch job correctly', async () => {
|
||||
JobsAPI.readRelaunch.mockResolvedValue({
|
||||
data: {
|
||||
can_start_without_user_input: true,
|
||||
},
|
||||
});
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/jobs/9000'],
|
||||
});
|
||||
JobsAPI.relaunch.mockResolvedValue({
|
||||
data: {
|
||||
id: 9000,
|
||||
},
|
||||
});
|
||||
const wrapper = mountWithContexts(
|
||||
<LaunchButton
|
||||
resource={{
|
||||
id: 1,
|
||||
type: 'job',
|
||||
}}
|
||||
>
|
||||
{relaunchButton}
|
||||
</LaunchButton>,
|
||||
{
|
||||
context: {
|
||||
router: { history },
|
||||
},
|
||||
}
|
||||
);
|
||||
const button = wrapper.find('button');
|
||||
button.prop('onClick')();
|
||||
expect(JobsAPI.readRelaunch).toHaveBeenCalledWith(1);
|
||||
await sleep(0);
|
||||
expect(JobsAPI.relaunch).toHaveBeenCalledWith(1);
|
||||
expect(history.location.pathname).toEqual('/jobs/9000/output');
|
||||
});
|
||||
|
||||
test('should relaunch workflow job correctly', async () => {
|
||||
WorkflowJobsAPI.readRelaunch.mockResolvedValue({
|
||||
data: {
|
||||
can_start_without_user_input: true,
|
||||
},
|
||||
});
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/jobs/9000'],
|
||||
});
|
||||
WorkflowJobsAPI.relaunch.mockResolvedValue({
|
||||
data: {
|
||||
id: 9000,
|
||||
},
|
||||
});
|
||||
const wrapper = mountWithContexts(
|
||||
<LaunchButton
|
||||
resource={{
|
||||
id: 1,
|
||||
type: 'workflow_job',
|
||||
}}
|
||||
>
|
||||
{relaunchButton}
|
||||
</LaunchButton>,
|
||||
{
|
||||
context: {
|
||||
router: { history },
|
||||
},
|
||||
}
|
||||
);
|
||||
const button = wrapper.find('button');
|
||||
button.prop('onClick')();
|
||||
expect(WorkflowJobsAPI.readRelaunch).toHaveBeenCalledWith(1);
|
||||
await sleep(0);
|
||||
expect(WorkflowJobsAPI.relaunch).toHaveBeenCalledWith(1);
|
||||
expect(history.location.pathname).toEqual('/jobs/9000/output');
|
||||
});
|
||||
|
||||
test('should relaunch project sync correctly', async () => {
ProjectsAPI.readLaunchUpdate.mockResolvedValue({
data: {
can_start_without_user_input: true,
},
});
const history = createMemoryHistory({
initialEntries: ['/jobs/9000'],
});
ProjectsAPI.launchUpdate.mockResolvedValue({
data: {
id: 9000,
},
});
const wrapper = mountWithContexts(
<LaunchButton
resource={{
id: 1,
project: 5,
type: 'project_update',
}}
>
{relaunchButton}
</LaunchButton>,
{
context: {
router: { history },
},
}
);
const button = wrapper.find('button');
button.prop('onClick')();
expect(ProjectsAPI.readLaunchUpdate).toHaveBeenCalledWith(5);
await sleep(0);
expect(ProjectsAPI.launchUpdate).toHaveBeenCalledWith(5);
expect(history.location.pathname).toEqual('/jobs/9000/output');
});

test('should relaunch inventory source sync correctly', async () => {
InventorySourcesAPI.readLaunchUpdate.mockResolvedValue({
data: {
can_start_without_user_input: true,
},
});
const history = createMemoryHistory({
initialEntries: ['/jobs/9000'],
});
InventorySourcesAPI.launchUpdate.mockResolvedValue({
data: {
id: 9000,
},
});
const wrapper = mountWithContexts(
<LaunchButton
resource={{
id: 1,
inventory_source: 5,
type: 'inventory_update',
}}
>
{relaunchButton}
</LaunchButton>,
{
context: {
router: { history },
},
}
);
const button = wrapper.find('button');
button.prop('onClick')();
expect(InventorySourcesAPI.readLaunchUpdate).toHaveBeenCalledWith(5);
await sleep(0);
expect(InventorySourcesAPI.launchUpdate).toHaveBeenCalledWith(5);
expect(history.location.pathname).toEqual('/jobs/9000/output');
});

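Taken together, the relaunch tests above pin down a simple dispatch: LaunchButton picks its read/relaunch API pair from `resource.type`, and for update jobs it calls the API with the related `project` or `inventory_source` id rather than the job id. The component itself is not part of this hunk, so the snippet below is only a sketch of the mapping the tests imply; the helper name is invented here, and the API modules are assumed to be the same ones the tests import from `../../api`.

```js
// Illustrative sketch only; the real LaunchButton internals are not shown in this diff.
function getRelaunchApis(resource) {
  switch (resource.type) {
    case 'job':
      return {
        readRelaunch: () => JobsAPI.readRelaunch(resource.id),
        relaunch: () => JobsAPI.relaunch(resource.id),
      };
    case 'workflow_job':
      return {
        readRelaunch: () => WorkflowJobsAPI.readRelaunch(resource.id),
        relaunch: () => WorkflowJobsAPI.relaunch(resource.id),
      };
    case 'project_update':
      // Project update jobs are relaunched through the parent project.
      return {
        readRelaunch: () => ProjectsAPI.readLaunchUpdate(resource.project),
        relaunch: () => ProjectsAPI.launchUpdate(resource.project),
      };
    case 'inventory_update':
      // Inventory update jobs are relaunched through the parent inventory source.
      return {
        readRelaunch: () => InventorySourcesAPI.readLaunchUpdate(resource.inventory_source),
        relaunch: () => InventorySourcesAPI.launchUpdate(resource.inventory_source),
      };
    default:
      throw new Error(`Unsupported resource type: ${resource.type}`);
  }
}
```

In every case the tests then assert that a successful relaunch redirects to `/jobs/<id>/output`.
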
test('displays error modal after unsuccessful launch', async () => {
const wrapper = mountWithContexts(
<LaunchButton resource={resource}>{children}</LaunchButton>
<LaunchButton resource={resource}>{launchButton}</LaunchButton>
);
JobTemplatesAPI.launch.mockRejectedValue(
new Error({

@@ -6,23 +6,31 @@ import { Formik, useFormikContext } from 'formik';
|
||||
import ContentError from '../ContentError';
|
||||
import ContentLoading from '../ContentLoading';
|
||||
import { useDismissableError } from '../../util/useRequest';
|
||||
import mergeExtraVars from './mergeExtraVars';
|
||||
import mergeExtraVars from '../../util/prompt/mergeExtraVars';
|
||||
import getSurveyValues from '../../util/prompt/getSurveyValues';
|
||||
import useLaunchSteps from './useLaunchSteps';
|
||||
import AlertModal from '../AlertModal';
|
||||
import getSurveyValues from './getSurveyValues';
|
||||
|
||||
function PromptModalForm({ onSubmit, onCancel, i18n, config, resource }) {
|
||||
const { values, setTouched, validateForm } = useFormikContext();
|
||||
function PromptModalForm({
|
||||
launchConfig,
|
||||
i18n,
|
||||
onCancel,
|
||||
onSubmit,
|
||||
resource,
|
||||
surveyConfig,
|
||||
}) {
|
||||
const { setFieldTouched, values } = useFormikContext();
|
||||
|
||||
const {
|
||||
steps,
|
||||
isReady,
|
||||
validateStep,
|
||||
visitStep,
|
||||
visitAllSteps,
|
||||
contentError,
|
||||
} = useLaunchSteps(config, resource, i18n);
|
||||
} = useLaunchSteps(launchConfig, surveyConfig, resource, i18n);
|
||||
|
||||
const handleSave = () => {
|
||||
const handleSubmit = () => {
|
||||
const postValues = {};
|
||||
const setValue = (key, value) => {
|
||||
if (typeof value !== 'undefined' && value !== null) {
|
||||
@@ -30,6 +38,7 @@ function PromptModalForm({ onSubmit, onCancel, i18n, config, resource }) {
|
||||
}
|
||||
};
|
||||
const surveyValues = getSurveyValues(values);
|
||||
setValue('credential_passwords', values.credential_passwords);
|
||||
setValue('inventory_id', values.inventory?.id);
|
||||
setValue(
|
||||
'credentials',
|
||||
@@ -39,7 +48,7 @@ function PromptModalForm({ onSubmit, onCancel, i18n, config, resource }) {
|
||||
setValue('limit', values.limit);
|
||||
setValue('job_tags', values.job_tags);
|
||||
setValue('skip_tags', values.skip_tags);
|
||||
const extraVars = config.ask_variables_on_launch
|
||||
const extraVars = launchConfig.ask_variables_on_launch
|
||||
? values.extra_vars || '---'
|
||||
: resource.extra_vars;
|
||||
setValue('extra_vars', mergeExtraVars(extraVars, surveyValues));
|
||||
@@ -68,22 +77,25 @@ function PromptModalForm({ onSubmit, onCancel, i18n, config, resource }) {
|
||||
<Wizard
|
||||
isOpen
|
||||
onClose={onCancel}
|
||||
onSave={handleSave}
|
||||
onSave={handleSubmit}
|
||||
onBack={async nextStep => {
|
||||
validateStep(nextStep.id);
|
||||
}}
|
||||
onNext={async (nextStep, prevStep) => {
|
||||
if (nextStep.id === 'preview') {
|
||||
visitAllSteps(setTouched);
|
||||
visitAllSteps(setFieldTouched);
|
||||
} else {
|
||||
visitStep(prevStep.prevId);
|
||||
visitStep(prevStep.prevId, setFieldTouched);
|
||||
validateStep(nextStep.id);
|
||||
}
|
||||
await validateForm();
|
||||
}}
|
||||
onGoToStep={async (nextStep, prevStep) => {
|
||||
if (nextStep.id === 'preview') {
|
||||
visitAllSteps(setTouched);
|
||||
visitAllSteps(setFieldTouched);
|
||||
} else {
|
||||
visitStep(prevStep.prevId);
|
||||
visitStep(prevStep.prevId, setFieldTouched);
|
||||
validateStep(nextStep.id);
|
||||
}
|
||||
await validateForm();
|
||||
}}
|
||||
title={i18n._(t`Prompts`)}
|
||||
steps={
|
||||
@@ -103,28 +115,22 @@ function PromptModalForm({ onSubmit, onCancel, i18n, config, resource }) {
|
||||
);
|
||||
}
|
||||
|
||||
function LaunchPrompt({ config, resource = {}, onLaunch, onCancel, i18n }) {
|
||||
function LaunchPrompt({
|
||||
launchConfig,
|
||||
i18n,
|
||||
onCancel,
|
||||
onLaunch,
|
||||
resource = {},
|
||||
surveyConfig,
|
||||
}) {
|
||||
return (
|
||||
<Formik
|
||||
initialValues={{
|
||||
verbosity: resource.verbosity || 0,
|
||||
inventory: resource.summary_fields?.inventory || null,
|
||||
credentials: resource.summary_fields?.credentials || null,
|
||||
diff_mode: resource.diff_mode || false,
|
||||
extra_vars: resource.extra_vars || '---',
|
||||
job_type: resource.job_type || '',
|
||||
job_tags: resource.job_tags || '',
|
||||
skip_tags: resource.skip_tags || '',
|
||||
scm_branch: resource.scm_branch || '',
|
||||
limit: resource.limit || '',
|
||||
}}
|
||||
onSubmit={values => onLaunch(values)}
|
||||
>
|
||||
<Formik initialValues={{}} onSubmit={values => onLaunch(values)}>
|
||||
<PromptModalForm
|
||||
onSubmit={values => onLaunch(values)}
|
||||
onCancel={onCancel}
|
||||
i18n={i18n}
|
||||
config={config}
|
||||
launchConfig={launchConfig}
|
||||
surveyConfig={surveyConfig}
|
||||
resource={resource}
|
||||
/>
|
||||
</Formik>
|
||||
|
||||
@@ -76,28 +76,65 @@ describe('LaunchPrompt', () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<LaunchPrompt
|
||||
config={{
|
||||
launchConfig={{
|
||||
...config,
|
||||
ask_inventory_on_launch: true,
|
||||
ask_credential_on_launch: true,
|
||||
ask_scm_branch_on_launch: true,
|
||||
survey_enabled: true,
|
||||
passwords_needed_to_start: ['ssh_password'],
|
||||
defaults: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: ['ssh_password'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
resource={{
|
||||
...resource,
|
||||
summary_fields: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
resource={resource}
|
||||
onLaunch={noop}
|
||||
onCancel={noop}
|
||||
surveyConfig={{
|
||||
name: '',
|
||||
description: '',
|
||||
spec: [
|
||||
{
|
||||
choices: '',
|
||||
default: '',
|
||||
max: 1024,
|
||||
min: 0,
|
||||
new_question: false,
|
||||
question_description: '',
|
||||
question_name: 'foo',
|
||||
required: true,
|
||||
type: 'text',
|
||||
variable: 'foo',
|
||||
},
|
||||
],
|
||||
}}
|
||||
/>
|
||||
);
|
||||
});
|
||||
const wizard = await waitForElement(wrapper, 'Wizard');
|
||||
const steps = wizard.prop('steps');
|
||||
|
||||
expect(steps).toHaveLength(5);
|
||||
expect(steps).toHaveLength(6);
|
||||
expect(steps[0].name.props.children).toEqual('Inventory');
|
||||
expect(steps[1].name).toEqual('Credentials');
|
||||
expect(steps[2].name).toEqual('Other Prompts');
|
||||
expect(steps[3].name.props.children).toEqual('Survey');
|
||||
expect(steps[4].name).toEqual('Preview');
|
||||
expect(steps[1].name.props.children).toEqual('Credentials');
|
||||
expect(steps[2].name.props.children).toEqual('Credential passwords');
|
||||
expect(steps[3].name.props.children).toEqual('Other prompts');
|
||||
expect(steps[4].name.props.children).toEqual('Survey');
|
||||
expect(steps[5].name.props.children).toEqual('Preview');
|
||||
});
|
||||
|
||||
test('should add inventory step', async () => {
|
||||
@@ -105,7 +142,7 @@ describe('LaunchPrompt', () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<LaunchPrompt
|
||||
config={{
|
||||
launchConfig={{
|
||||
...config,
|
||||
ask_inventory_on_launch: true,
|
||||
}}
|
||||
@@ -129,7 +166,7 @@ describe('LaunchPrompt', () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<LaunchPrompt
|
||||
config={{
|
||||
launchConfig={{
|
||||
...config,
|
||||
ask_credential_on_launch: true,
|
||||
}}
|
||||
@@ -143,7 +180,7 @@ describe('LaunchPrompt', () => {
|
||||
const steps = wizard.prop('steps');
|
||||
|
||||
expect(steps).toHaveLength(2);
|
||||
expect(steps[0].name).toEqual('Credentials');
|
||||
expect(steps[0].name.props.children).toEqual('Credentials');
|
||||
expect(isElementOfType(steps[0].component, CredentialsStep)).toEqual(true);
|
||||
expect(isElementOfType(steps[1].component, PreviewStep)).toEqual(true);
|
||||
});
|
||||
@@ -153,7 +190,7 @@ describe('LaunchPrompt', () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<LaunchPrompt
|
||||
config={{
|
||||
launchConfig={{
|
||||
...config,
|
||||
ask_verbosity_on_launch: true,
|
||||
}}
|
||||
@@ -167,7 +204,7 @@ describe('LaunchPrompt', () => {
|
||||
const steps = wizard.prop('steps');
|
||||
|
||||
expect(steps).toHaveLength(2);
|
||||
expect(steps[0].name).toEqual('Other Prompts');
|
||||
expect(steps[0].name.props.children).toEqual('Other prompts');
|
||||
expect(isElementOfType(steps[0].component, OtherPromptsStep)).toEqual(true);
|
||||
expect(isElementOfType(steps[1].component, PreviewStep)).toEqual(true);
|
||||
});
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
import React from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Form } from '@patternfly/react-core';
|
||||
import { useFormikContext } from 'formik';
|
||||
import { PasswordField } from '../../FormField';
|
||||
|
||||
function CredentialPasswordsStep({ launchConfig, i18n }) {
|
||||
const {
|
||||
values: { credentials },
|
||||
} = useFormikContext();
|
||||
|
||||
const vaultsThatPrompt = [];
|
||||
let showcredentialPasswordSsh = false;
|
||||
let showcredentialPasswordPrivilegeEscalation = false;
|
||||
let showcredentialPasswordPrivateKeyPassphrase = false;
|
||||
|
||||
if (
|
||||
!launchConfig.ask_credential_on_launch &&
|
||||
launchConfig.passwords_needed_to_start
|
||||
) {
|
||||
launchConfig.passwords_needed_to_start.forEach(password => {
|
||||
if (password === 'ssh_password') {
|
||||
showcredentialPasswordSsh = true;
|
||||
} else if (password === 'become_password') {
|
||||
showcredentialPasswordPrivilegeEscalation = true;
|
||||
} else if (password === 'ssh_key_unlock') {
|
||||
showcredentialPasswordPrivateKeyPassphrase = true;
|
||||
} else if (password.startsWith('vault_password')) {
|
||||
const vaultId = password.split(/\.(.+)/)[1] || '';
|
||||
vaultsThatPrompt.push(vaultId);
|
||||
}
|
||||
});
|
||||
} else if (credentials) {
|
||||
credentials.forEach(credential => {
|
||||
if (!credential.inputs) {
|
||||
const launchConfigCredential = launchConfig.defaults.credentials.find(
|
||||
defaultCred => defaultCred.id === credential.id
|
||||
);
|
||||
|
||||
if (launchConfigCredential?.passwords_needed.length > 0) {
|
||||
if (
|
||||
launchConfigCredential.passwords_needed.includes('ssh_password')
|
||||
) {
|
||||
showcredentialPasswordSsh = true;
|
||||
}
|
||||
if (
|
||||
launchConfigCredential.passwords_needed.includes('become_password')
|
||||
) {
|
||||
showcredentialPasswordPrivilegeEscalation = true;
|
||||
}
|
||||
if (
|
||||
launchConfigCredential.passwords_needed.includes('ssh_key_unlock')
|
||||
) {
|
||||
showcredentialPasswordPrivateKeyPassphrase = true;
|
||||
}
|
||||
|
||||
const vaultPasswordIds = launchConfigCredential.passwords_needed
|
||||
.filter(passwordNeeded =>
|
||||
passwordNeeded.startsWith('vault_password')
|
||||
)
|
||||
.map(vaultPassword => vaultPassword.split(/\.(.+)/)[1] || '');
|
||||
|
||||
vaultsThatPrompt.push(...vaultPasswordIds);
|
||||
}
|
||||
} else {
|
||||
if (credential?.inputs?.password === 'ASK') {
|
||||
showcredentialPasswordSsh = true;
|
||||
}
|
||||
|
||||
if (credential?.inputs?.become_password === 'ASK') {
|
||||
showcredentialPasswordPrivilegeEscalation = true;
|
||||
}
|
||||
|
||||
if (credential?.inputs?.ssh_key_unlock === 'ASK') {
|
||||
showcredentialPasswordPrivateKeyPassphrase = true;
|
||||
}
|
||||
|
||||
if (credential?.inputs?.vault_password === 'ASK') {
|
||||
vaultsThatPrompt.push(credential.inputs.vault_id);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
<Form>
|
||||
{showcredentialPasswordSsh && (
|
||||
<PasswordField
|
||||
id="launch-ssh-password"
|
||||
label={i18n._(t`SSH password`)}
|
||||
name="credential_passwords.ssh_password"
|
||||
isRequired
|
||||
/>
|
||||
)}
|
||||
{showcredentialPasswordPrivateKeyPassphrase && (
|
||||
<PasswordField
|
||||
id="launch-private-key-passphrase"
|
||||
label={i18n._(t`Private key passphrase`)}
|
||||
name="credential_passwords.ssh_key_unlock"
|
||||
isRequired
|
||||
/>
|
||||
)}
|
||||
{showcredentialPasswordPrivilegeEscalation && (
|
||||
<PasswordField
|
||||
id="launch-privilege-escalation-password"
|
||||
label={i18n._(t`Privilege escalation password`)}
|
||||
name="credential_passwords.become_password"
|
||||
isRequired
|
||||
/>
|
||||
)}
|
||||
{vaultsThatPrompt.map(credId => (
|
||||
<PasswordField
|
||||
id={`launch-vault-password-${credId}`}
|
||||
key={credId}
|
||||
label={
|
||||
credId === ''
|
||||
? i18n._(t`Vault password`)
|
||||
: i18n._(t`Vault password | ${credId}`)
|
||||
}
|
||||
name={`credential_passwords['vault_password${
|
||||
credId !== '' ? `.${credId}` : ''
|
||||
}']`}
|
||||
isRequired
|
||||
/>
|
||||
))}
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(CredentialPasswordsStep);
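Every field rendered above is keyed under `credential_passwords`, and vault passwords carry the vault id after a dot, so a credential with `vault_id: 'foobar'` ends up at `credential_passwords['vault_password.foobar']`, while an id-less vault credential uses the bare `vault_password` key. A minimal sketch of that naming rule, using a helper name of our own that does not appear in the diff:

```js
// Hypothetical helper that restates the field-name convention used in the JSX above.
function credentialPasswordFieldName(passwordType, vaultId = '') {
  if (passwordType === 'vault_password') {
    // Matches the component: credential_passwords['vault_password'] or
    // credential_passwords['vault_password.<vault_id>']
    return `credential_passwords['vault_password${vaultId !== '' ? `.${vaultId}` : ''}']`;
  }
  // ssh_password, become_password and ssh_key_unlock use plain dot notation.
  return `credential_passwords.${passwordType}`;
}
```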
|
||||
@@ -0,0 +1,603 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { Formik } from 'formik';
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import CredentialPasswordsStep from './CredentialPasswordsStep';
|
||||
|
||||
describe('CredentialPasswordsStep', () => {
|
||||
describe('JT default credentials (no credential replacement) and creds are promptable', () => {
|
||||
test('should render ssh password field when JT has default machine cred', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: ['ssh_password'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render become password field when JT has default machine cred', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: ['become_password'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render private key passphrase field when JT has default machine cred', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
defaults: {
|
||||
ask_credential_on_launch: true,
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: ['ssh_key_unlock'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render vault password field when JT has default vault cred', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: ['vault_password.1'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-1')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
test('should render all password field when JT has default vault cred and machine cred', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
passwords_needed: [
|
||||
'ssh_password',
|
||||
'become_password',
|
||||
'ssh_key_unlock',
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
passwords_needed: ['vault_password.1'],
|
||||
},
|
||||
],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-1')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
describe('Credentials have been replaced and creds are promptable', () => {
|
||||
test('should render ssh password field when replacement machine cred prompts for it', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
inputs: {
|
||||
password: 'ASK',
|
||||
become_password: null,
|
||||
ssh_key_unlock: null,
|
||||
vault_password: null,
|
||||
},
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render become password field when replacement machine cred prompts for it', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
inputs: {
|
||||
password: null,
|
||||
become_password: 'ASK',
|
||||
ssh_key_unlock: null,
|
||||
vault_password: null,
|
||||
},
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render private key passphrase field when replacement machine cred prompts for it', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
inputs: {
|
||||
password: null,
|
||||
become_password: null,
|
||||
ssh_key_unlock: 'ASK',
|
||||
vault_password: null,
|
||||
},
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render vault password field when replacement vault cred prompts for it', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
inputs: {
|
||||
password: null,
|
||||
become_password: null,
|
||||
ssh_key_unlock: null,
|
||||
vault_password: 'ASK',
|
||||
vault_id: 'foobar',
|
||||
},
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-foobar')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
test('should render all password fields when replacement vault and machine creds prompt for it', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik
|
||||
initialValues={{
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
inputs: {
|
||||
password: 'ASK',
|
||||
become_password: 'ASK',
|
||||
ssh_key_unlock: 'ASK',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
inputs: {
|
||||
password: null,
|
||||
become_password: null,
|
||||
ssh_key_unlock: null,
|
||||
vault_password: 'ASK',
|
||||
vault_id: 'foobar',
|
||||
},
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: true,
|
||||
defaults: {
|
||||
credentials: [],
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-foobar')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
describe('Credentials have been replaced and creds are not promptable', () => {
|
||||
test('should render ssh password field when required', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{}}>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: false,
|
||||
passwords_needed_to_start: ['ssh_password'],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render become password field when required', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{}}>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: false,
|
||||
passwords_needed_to_start: ['become_password'],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render private key passphrase field when required', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{}}>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: false,
|
||||
passwords_needed_to_start: ['ssh_key_unlock'],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(0);
|
||||
});
|
||||
test('should render vault password field when required', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{}}>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: false,
|
||||
passwords_needed_to_start: ['vault_password.foobar'],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(0);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-foobar')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
test('should render all password fields when required', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{}}>
|
||||
<CredentialPasswordsStep
|
||||
launchConfig={{
|
||||
ask_credential_on_launch: false,
|
||||
passwords_needed_to_start: [
|
||||
'ssh_password',
|
||||
'become_password',
|
||||
'ssh_key_unlock',
|
||||
'vault_password.foobar',
|
||||
],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
expect(wrapper.find('PasswordField#launch-ssh-password')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-private-key-passphrase')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-privilege-escalation-password')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField[id^="launch-vault-password-"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('PasswordField#launch-vault-password-foobar')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,6 +3,7 @@ import { useHistory } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { useField } from 'formik';
|
||||
import { Alert } from '@patternfly/react-core';
|
||||
import { InventoriesAPI } from '../../../api';
|
||||
import { getQSConfig, parseQueryString } from '../../../util/qs';
|
||||
import useRequest from '../../../util/useRequest';
|
||||
@@ -17,9 +18,10 @@ const QS_CONFIG = getQSConfig('inventory', {
|
||||
});
|
||||
|
||||
function InventoryStep({ i18n }) {
|
||||
const [field, , helpers] = useField({
|
||||
const [field, meta, helpers] = useField({
|
||||
name: 'inventory',
|
||||
});
|
||||
|
||||
const history = useHistory();
|
||||
|
||||
const {
|
||||
@@ -65,40 +67,45 @@ function InventoryStep({ i18n }) {
|
||||
}
|
||||
|
||||
return (
|
||||
<OptionsList
|
||||
value={field.value ? [field.value] : []}
|
||||
options={inventories}
|
||||
optionCount={count}
|
||||
searchColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
key: 'created_by__username__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
key: 'modified_by__username__icontains',
|
||||
},
|
||||
]}
|
||||
sortColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
},
|
||||
]}
|
||||
searchableKeys={searchableKeys}
|
||||
relatedSearchableKeys={relatedSearchableKeys}
|
||||
header={i18n._(t`Inventory`)}
|
||||
name="inventory"
|
||||
qsConfig={QS_CONFIG}
|
||||
readOnly
|
||||
selectItem={helpers.setValue}
|
||||
deselectItem={() => field.onChange(null)}
|
||||
/>
|
||||
<>
|
||||
<OptionsList
|
||||
value={field.value ? [field.value] : []}
|
||||
options={inventories}
|
||||
optionCount={count}
|
||||
searchColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
key: 'created_by__username__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
key: 'modified_by__username__icontains',
|
||||
},
|
||||
]}
|
||||
sortColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
},
|
||||
]}
|
||||
searchableKeys={searchableKeys}
|
||||
relatedSearchableKeys={relatedSearchableKeys}
|
||||
header={i18n._(t`Inventory`)}
|
||||
name="inventory"
|
||||
qsConfig={QS_CONFIG}
|
||||
readOnly
|
||||
selectItem={helpers.setValue}
|
||||
deselectItem={() => field.onChange(null)}
|
||||
/>
|
||||
{meta.touched && meta.error && (
|
||||
<Alert variant="danger" isInline title={meta.error} />
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -20,11 +20,11 @@ const FieldHeader = styled.div`
|
||||
}
|
||||
`;
|
||||
|
||||
function OtherPromptsStep({ config, i18n }) {
|
||||
function OtherPromptsStep({ launchConfig, i18n }) {
|
||||
return (
|
||||
<Form>
|
||||
{config.ask_job_type_on_launch && <JobTypeField i18n={i18n} />}
|
||||
{config.ask_limit_on_launch && (
|
||||
{launchConfig.ask_job_type_on_launch && <JobTypeField i18n={i18n} />}
|
||||
{launchConfig.ask_limit_on_launch && (
|
||||
<FormField
|
||||
id="prompt-limit"
|
||||
name="limit"
|
||||
@@ -35,7 +35,7 @@ function OtherPromptsStep({ config, i18n }) {
|
||||
information and examples on patterns.`)}
|
||||
/>
|
||||
)}
|
||||
{config.ask_scm_branch_on_launch && (
|
||||
{launchConfig.ask_scm_branch_on_launch && (
|
||||
<FormField
|
||||
id="prompt-scm-branch"
|
||||
name="scm_branch"
|
||||
@@ -45,9 +45,11 @@ function OtherPromptsStep({ config, i18n }) {
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
{config.ask_verbosity_on_launch && <VerbosityField i18n={i18n} />}
|
||||
{config.ask_diff_mode_on_launch && <ShowChangesToggle i18n={i18n} />}
|
||||
{config.ask_tags_on_launch && (
|
||||
{launchConfig.ask_verbosity_on_launch && <VerbosityField i18n={i18n} />}
|
||||
{launchConfig.ask_diff_mode_on_launch && (
|
||||
<ShowChangesToggle i18n={i18n} />
|
||||
)}
|
||||
{launchConfig.ask_tags_on_launch && (
|
||||
<TagField
|
||||
id="prompt-job-tags"
|
||||
name="job_tags"
|
||||
@@ -59,7 +61,7 @@ function OtherPromptsStep({ config, i18n }) {
|
||||
documentation for details on the usage of tags.`)}
|
||||
/>
|
||||
)}
|
||||
{config.ask_skip_tags_on_launch && (
|
||||
{launchConfig.ask_skip_tags_on_launch && (
|
||||
<TagField
|
||||
id="prompt-skip-tags"
|
||||
name="skip_tags"
|
||||
@@ -71,7 +73,7 @@ function OtherPromptsStep({ config, i18n }) {
|
||||
documentation for details on the usage of tags.`)}
|
||||
/>
|
||||
)}
|
||||
{config.ask_variables_on_launch && (
|
||||
{launchConfig.ask_variables_on_launch && (
|
||||
<VariablesField
|
||||
id="prompt-variables"
|
||||
name="extra_vars"
|
||||
|
||||
@@ -11,7 +11,7 @@ describe('OtherPromptsStep', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ job_type: 'run' }}>
|
||||
<OtherPromptsStep
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_job_type_on_launch: true,
|
||||
}}
|
||||
/>
|
||||
@@ -34,7 +34,7 @@ describe('OtherPromptsStep', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik>
|
||||
<OtherPromptsStep
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_limit_on_launch: true,
|
||||
}}
|
||||
/>
|
||||
@@ -54,7 +54,7 @@ describe('OtherPromptsStep', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik>
|
||||
<OtherPromptsStep
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_scm_branch_on_launch: true,
|
||||
}}
|
||||
/>
|
||||
@@ -74,7 +74,7 @@ describe('OtherPromptsStep', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ verbosity: '' }}>
|
||||
<OtherPromptsStep
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_verbosity_on_launch: true,
|
||||
}}
|
||||
/>
|
||||
@@ -94,7 +94,7 @@ describe('OtherPromptsStep', () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ diff_mode: true }}>
|
||||
<OtherPromptsStep
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_diff_mode_on_launch: true,
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -6,8 +6,10 @@ import { t } from '@lingui/macro';
|
||||
import { useFormikContext } from 'formik';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import yaml from 'js-yaml';
|
||||
import mergeExtraVars, { maskPasswords } from '../mergeExtraVars';
|
||||
import getSurveyValues from '../getSurveyValues';
|
||||
import mergeExtraVars, {
|
||||
maskPasswords,
|
||||
} from '../../../util/prompt/mergeExtraVars';
|
||||
import getSurveyValues from '../../../util/prompt/getSurveyValues';
|
||||
import PromptDetail from '../../PromptDetail';
|
||||
|
||||
const ExclamationCircleIcon = styled(PFExclamationCircleIcon)`
|
||||
@@ -23,18 +25,25 @@ const ErrorMessageWrapper = styled.div`
|
||||
margin-bottom: 10px;
|
||||
`;
|
||||
|
||||
function PreviewStep({ resource, config, survey, formErrors, i18n }) {
|
||||
function PreviewStep({
|
||||
resource,
|
||||
launchConfig,
|
||||
surveyConfig,
|
||||
formErrors,
|
||||
i18n,
|
||||
}) {
|
||||
const { values } = useFormikContext();
|
||||
const surveyValues = getSurveyValues(values);
|
||||
|
||||
const overrides = { ...values };
|
||||
const overrides = {
|
||||
...values,
|
||||
};
|
||||
|
||||
if (config.ask_variables_on_launch || config.survey_enabled) {
|
||||
const initialExtraVars = config.ask_variables_on_launch
|
||||
? values.extra_vars || '---'
|
||||
: resource.extra_vars;
|
||||
if (survey && survey.spec) {
|
||||
const passwordFields = survey.spec
|
||||
if (launchConfig.ask_variables_on_launch || launchConfig.survey_enabled) {
|
||||
const initialExtraVars =
|
||||
launchConfig.ask_variables_on_launch && (overrides.extra_vars || '---');
|
||||
if (surveyConfig?.spec) {
|
||||
const passwordFields = surveyConfig.spec
|
||||
.filter(q => q.type === 'password')
|
||||
.map(q => q.variable);
|
||||
const masked = maskPasswords(surveyValues, passwordFields);
|
||||
@@ -42,7 +51,9 @@ function PreviewStep({ resource, config, survey, formErrors, i18n }) {
|
||||
mergeExtraVars(initialExtraVars, masked)
|
||||
);
|
||||
} else {
|
||||
overrides.extra_vars = initialExtraVars;
|
||||
overrides.extra_vars = yaml.safeDump(
|
||||
mergeExtraVars(initialExtraVars, {})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,7 +73,7 @@ function PreviewStep({ resource, config, survey, formErrors, i18n }) {
|
||||
)}
|
||||
<PromptDetail
|
||||
resource={resource}
|
||||
launchConfig={config}
|
||||
launchConfig={launchConfig}
|
||||
overrides={overrides}
|
||||
/>
|
||||
</Fragment>
|
||||
|
||||
@@ -36,11 +36,11 @@ describe('PreviewStep', () => {
|
||||
<Formik initialValues={{ limit: '4', survey_foo: 'abc' }}>
|
||||
<PreviewStep
|
||||
resource={resource}
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_limit_on_launch: true,
|
||||
survey_enabled: true,
|
||||
}}
|
||||
survey={survey}
|
||||
surveyConfig={survey}
|
||||
formErrors={formErrors}
|
||||
/>
|
||||
</Formik>
|
||||
@@ -64,7 +64,7 @@ describe('PreviewStep', () => {
|
||||
<Formik initialValues={{ limit: '4' }}>
|
||||
<PreviewStep
|
||||
resource={resource}
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_limit_on_launch: true,
|
||||
}}
|
||||
formErrors={formErrors}
|
||||
@@ -80,7 +80,32 @@ describe('PreviewStep', () => {
|
||||
limit: '4',
|
||||
});
|
||||
});
|
||||
test('should handle extra vars with survey', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ extra_vars: 'one: 1', survey_foo: 'abc' }}>
|
||||
<PreviewStep
|
||||
resource={resource}
|
||||
launchConfig={{
|
||||
ask_variables_on_launch: true,
|
||||
survey_enabled: true,
|
||||
}}
|
||||
surveyConfig={survey}
|
||||
formErrors={formErrors}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
const detail = wrapper.find('PromptDetail');
|
||||
expect(detail).toHaveLength(1);
|
||||
expect(detail.prop('resource')).toEqual(resource);
|
||||
expect(detail.prop('overrides')).toEqual({
|
||||
extra_vars: 'one: 1\nfoo: abc\n',
|
||||
survey_foo: 'abc',
|
||||
});
|
||||
});
|
||||
test('should handle extra vars without survey', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
@@ -88,7 +113,7 @@ describe('PreviewStep', () => {
|
||||
<Formik initialValues={{ extra_vars: 'one: 1' }}>
|
||||
<PreviewStep
|
||||
resource={resource}
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_variables_on_launch: true,
|
||||
}}
|
||||
formErrors={formErrors}
|
||||
@@ -101,10 +126,9 @@ describe('PreviewStep', () => {
|
||||
expect(detail).toHaveLength(1);
|
||||
expect(detail.prop('resource')).toEqual(resource);
|
||||
expect(detail.prop('overrides')).toEqual({
|
||||
extra_vars: 'one: 1',
|
||||
extra_vars: 'one: 1\n',
|
||||
});
|
||||
});
|
||||
|
||||
test('should remove survey with empty array value', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
@@ -115,7 +139,7 @@ describe('PreviewStep', () => {
|
||||
>
|
||||
<PreviewStep
|
||||
resource={resource}
|
||||
config={{
|
||||
launchConfig={{
|
||||
ask_variables_on_launch: true,
|
||||
}}
|
||||
formErrors={formErrors}
|
||||
@@ -128,7 +152,7 @@ describe('PreviewStep', () => {
|
||||
expect(detail).toHaveLength(1);
|
||||
expect(detail.prop('resource')).toEqual(resource);
|
||||
expect(detail.prop('overrides')).toEqual({
|
||||
extra_vars: 'one: 1',
|
||||
extra_vars: 'one: 1\n',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,13 +14,13 @@ const ExclamationCircleIcon = styled(PFExclamationCircleIcon)`
|
||||
margin-left: 10px;
|
||||
`;
|
||||
|
||||
function StepName({ hasErrors, children, i18n }) {
|
||||
function StepName({ hasErrors, children, i18n, id }) {
|
||||
if (!hasErrors) {
|
||||
return children;
|
||||
return <div id={id}>{children}</div>;
|
||||
}
|
||||
return (
|
||||
<>
|
||||
<AlertText>
|
||||
<AlertText id={id}>
|
||||
{children}
|
||||
<Tooltip
|
||||
position="right"
|
||||
|
||||
@@ -22,7 +22,7 @@ import {
|
||||
} from '../../../util/validators';
|
||||
import { Survey } from '../../../types';
|
||||
|
||||
function SurveyStep({ survey, i18n }) {
|
||||
function SurveyStep({ surveyConfig, i18n }) {
|
||||
const fieldTypes = {
|
||||
text: TextField,
|
||||
textarea: TextField,
|
||||
@@ -34,7 +34,7 @@ function SurveyStep({ survey, i18n }) {
|
||||
};
|
||||
return (
|
||||
<Form>
|
||||
{survey.spec.map(question => {
|
||||
{surveyConfig.spec.map(question => {
|
||||
const Field = fieldTypes[question.type];
|
||||
return (
|
||||
<Field key={question.variable} question={question} i18n={i18n} />
|
||||
@@ -44,7 +44,7 @@ function SurveyStep({ survey, i18n }) {
|
||||
);
|
||||
}
|
||||
SurveyStep.propTypes = {
|
||||
survey: Survey.isRequired,
|
||||
surveyConfig: Survey.isRequired,
|
||||
};
|
||||
|
||||
function TextField({ question, i18n }) {
|
||||
@@ -130,7 +130,8 @@ function MultiSelectField({ question, i18n }) {
|
||||
<FormGroup
|
||||
fieldId={id}
|
||||
helperTextInvalid={
|
||||
meta.error || i18n._(t`Must select a value for this field.`)
|
||||
meta.error ||
|
||||
i18n._(t`At least one value must be selected for this field.`)
|
||||
}
|
||||
isRequired={question.required}
|
||||
validated={isValid ? 'default' : 'error'}
|
||||
|
||||
@@ -0,0 +1,254 @@
|
||||
import React from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { useFormikContext } from 'formik';
|
||||
import CredentialPasswordsStep from './CredentialPasswordsStep';
|
||||
import StepName from './StepName';
|
||||
|
||||
const STEP_ID = 'credentialPasswords';
|
||||
|
||||
const isValueMissing = val => {
|
||||
return !val || val === '';
|
||||
};
|
||||
|
||||
export default function useCredentialPasswordsStep(
|
||||
launchConfig,
|
||||
i18n,
|
||||
showStep,
|
||||
visitedSteps
|
||||
) {
|
||||
const { values, setFieldError } = useFormikContext();
|
||||
const hasError =
|
||||
Object.keys(visitedSteps).includes(STEP_ID) &&
|
||||
checkForError(launchConfig, values);
|
||||
|
||||
return {
|
||||
step: showStep
|
||||
? {
|
||||
id: STEP_ID,
|
||||
name: (
|
||||
<StepName hasErrors={hasError} id="credential-passwords-step">
|
||||
{i18n._(t`Credential passwords`)}
|
||||
</StepName>
|
||||
),
|
||||
component: (
|
||||
<CredentialPasswordsStep launchConfig={launchConfig} i18n={i18n} />
|
||||
),
|
||||
enableNext: true,
|
||||
}
|
||||
: null,
|
||||
initialValues: getInitialValues(launchConfig, values.credentials),
|
||||
isReady: true,
|
||||
contentError: null,
|
||||
hasError,
|
||||
setTouched: setFieldTouched => {
|
||||
Object.keys(values.credential_passwords).forEach(credentialValueKey =>
|
||||
setFieldTouched(
|
||||
`credential_passwords['${credentialValueKey}']`,
|
||||
true,
|
||||
false
|
||||
)
|
||||
);
|
||||
},
|
||||
validate: () => {
|
||||
const setPasswordFieldError = fieldName => {
|
||||
setFieldError(fieldName, i18n._(t`This field may not be blank`));
|
||||
};
|
||||
|
||||
if (
|
||||
!launchConfig.ask_credential_on_launch &&
|
||||
launchConfig.passwords_needed_to_start
|
||||
) {
|
||||
launchConfig.passwords_needed_to_start.forEach(password => {
|
||||
if (isValueMissing(values.credential_passwords[password])) {
|
||||
setPasswordFieldError(`credential_passwords['${password}']`);
|
||||
}
|
||||
});
|
||||
} else if (values.credentials) {
|
||||
values.credentials.forEach(credential => {
|
||||
if (!credential.inputs) {
|
||||
const launchConfigCredential = launchConfig.defaults.credentials.find(
|
||||
defaultCred => defaultCred.id === credential.id
|
||||
);
|
||||
|
||||
if (launchConfigCredential?.passwords_needed.length > 0) {
|
||||
launchConfigCredential.passwords_needed.forEach(password => {
|
||||
if (isValueMissing(values.credential_passwords[password])) {
|
||||
setPasswordFieldError(`credential_passwords['${password}']`);
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
credential?.inputs?.password === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.ssh_password)
|
||||
) {
|
||||
setPasswordFieldError('credential_passwords.ssh_password');
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.become_password === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.become_password)
|
||||
) {
|
||||
setPasswordFieldError('credential_passwords.become_password');
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.ssh_key_unlock === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.ssh_key_unlock)
|
||||
) {
|
||||
setPasswordFieldError('credential_passwords.ssh_key_unlock');
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.vault_password === 'ASK' &&
|
||||
isValueMissing(
|
||||
values.credential_passwords[
|
||||
`vault_password${
|
||||
credential.inputs.vault_id !== ''
|
||||
? `.${credential.inputs.vault_id}`
|
||||
: ''
|
||||
}`
|
||||
]
|
||||
)
|
||||
) {
|
||||
setPasswordFieldError(
|
||||
`credential_passwords['vault_password${
|
||||
credential.inputs.vault_id !== ''
|
||||
? `.${credential.inputs.vault_id}`
|
||||
: ''
|
||||
}']`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function getInitialValues(launchConfig, selectedCredentials = []) {
|
||||
const initialValues = {
|
||||
credential_passwords: {},
|
||||
};
|
||||
|
||||
if (!launchConfig) {
|
||||
return initialValues;
|
||||
}
|
||||
|
||||
if (
|
||||
!launchConfig.ask_credential_on_launch &&
|
||||
launchConfig.passwords_needed_to_start
|
||||
) {
|
||||
launchConfig.passwords_needed_to_start.forEach(password => {
|
||||
initialValues.credential_passwords[password] = '';
|
||||
});
|
||||
return initialValues;
|
||||
}
|
||||
|
||||
selectedCredentials.forEach(credential => {
|
||||
if (!credential.inputs) {
|
||||
const launchConfigCredential = launchConfig.defaults.credentials.find(
|
||||
defaultCred => defaultCred.id === credential.id
|
||||
);
|
||||
|
||||
if (launchConfigCredential?.passwords_needed.length > 0) {
|
||||
launchConfigCredential.passwords_needed.forEach(password => {
|
||||
initialValues.credential_passwords[password] = '';
|
||||
});
|
||||
}
|
||||
} else {
|
||||
if (credential?.inputs?.password === 'ASK') {
|
||||
initialValues.credential_passwords.ssh_password = '';
|
||||
}
|
||||
|
||||
if (credential?.inputs?.become_password === 'ASK') {
|
||||
initialValues.credential_passwords.become_password = '';
|
||||
}
|
||||
|
||||
if (credential?.inputs?.ssh_key_unlock === 'ASK') {
|
||||
initialValues.credential_passwords.ssh_key_unlock = '';
|
||||
}
|
||||
|
||||
if (credential?.inputs?.vault_password === 'ASK') {
|
||||
if (!credential.inputs.vault_id || credential.inputs.vault_id === '') {
|
||||
initialValues.credential_passwords.vault_password = '';
|
||||
} else {
|
||||
initialValues.credential_passwords[
|
||||
`vault_password.${credential.inputs.vault_id}`
|
||||
] = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return initialValues;
|
||||
}
|
||||
|
||||
function checkForError(launchConfig, values) {
|
||||
let hasError = false;
|
||||
|
||||
if (
|
||||
!launchConfig.ask_credential_on_launch &&
|
||||
launchConfig.passwords_needed_to_start
|
||||
) {
|
||||
launchConfig.passwords_needed_to_start.forEach(password => {
|
||||
if (isValueMissing(values.credential_passwords[password])) {
|
||||
hasError = true;
|
||||
}
|
||||
});
|
||||
} else if (values.credentials) {
|
||||
values.credentials.forEach(credential => {
|
||||
if (!credential.inputs) {
|
||||
const launchConfigCredential = launchConfig.defaults.credentials.find(
|
||||
defaultCred => defaultCred.id === credential.id
|
||||
);
|
||||
|
||||
if (launchConfigCredential?.passwords_needed.length > 0) {
|
||||
launchConfigCredential.passwords_needed.forEach(password => {
|
||||
if (isValueMissing(values.credential_passwords[password])) {
|
||||
hasError = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
credential?.inputs?.password === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.ssh_password)
|
||||
) {
|
||||
hasError = true;
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.become_password === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.become_password)
|
||||
) {
|
||||
hasError = true;
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.ssh_key_unlock === 'ASK' &&
|
||||
isValueMissing(values.credential_passwords.ssh_key_unlock)
|
||||
) {
|
||||
hasError = true;
|
||||
}
|
||||
|
||||
if (
|
||||
credential?.inputs?.vault_password === 'ASK' &&
|
||||
isValueMissing(
|
||||
values.credential_passwords[
|
||||
`vault_password${
|
||||
credential.inputs.vault_id !== ''
|
||||
? `.${credential.inputs.vault_id}`
|
||||
: ''
|
||||
}`
|
||||
]
|
||||
)
|
||||
) {
|
||||
hasError = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return hasError;
|
||||
}
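As with the other step hooks in this directory, the hook above returns the shared shape `{ step, initialValues, isReady, contentError, hasError, setTouched, validate }` that the prompt wizard stitches together. `useLaunchSteps` is not included in this hunk, so the consumer below is only a hedged sketch of how such a shape could be merged; every name other than the hook's return keys is a placeholder.

```js
// Hedged sketch: merging the shared return shape of the use*Step hooks.
// This is not the real useLaunchSteps implementation, which is not shown in this diff.
function collectSteps(stepHookResults) {
  const initialValues = {};
  const steps = [];
  stepHookResults.forEach(({ step, initialValues: stepValues, hasError }) => {
    // e.g. merges { credential_passwords: { ssh_password: '' } } into the form defaults
    Object.assign(initialValues, stepValues);
    if (step) {
      steps.push({ ...step, hasError });
    }
  });
  return { steps, initialValues };
}
```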
|
||||
@@ -1,30 +1,47 @@
|
||||
import React from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import CredentialsStep from './CredentialsStep';
|
||||
import StepName from './StepName';
|
||||
|
||||
const STEP_ID = 'credentials';
|
||||
|
||||
export default function useCredentialsStep(config, i18n) {
|
||||
export default function useCredentialsStep(launchConfig, resource, i18n) {
|
||||
return {
|
||||
step: getStep(config, i18n),
|
||||
step: getStep(launchConfig, i18n),
|
||||
initialValues: getInitialValues(launchConfig, resource),
|
||||
isReady: true,
|
||||
contentError: null,
|
||||
formError: null,
|
||||
setTouched: setFieldsTouched => {
|
||||
setFieldsTouched({
|
||||
credentials: true,
|
||||
});
|
||||
hasError: false,
|
||||
setTouched: setFieldTouched => {
|
||||
setFieldTouched('credentials', true, false);
|
||||
},
|
||||
validate: () => {},
|
||||
};
|
||||
}
|
||||
|
||||
function getStep(config, i18n) {
|
||||
if (!config.ask_credential_on_launch) {
|
||||
function getStep(launchConfig, i18n) {
|
||||
if (!launchConfig.ask_credential_on_launch) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
id: STEP_ID,
|
||||
name: i18n._(t`Credentials`),
|
||||
key: 4,
|
||||
name: (
|
||||
<StepName hasErrors={false} id="credentials-step">
|
||||
{i18n._(t`Credentials`)}
|
||||
</StepName>
|
||||
),
|
||||
component: <CredentialsStep i18n={i18n} />,
|
||||
enableNext: true,
|
||||
};
|
||||
}
|
||||
|
||||
function getInitialValues(launchConfig, resource) {
|
||||
if (!launchConfig.ask_credential_on_launch) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return {
|
||||
credentials: resource?.summary_fields?.credentials || [],
|
||||
};
|
||||
}
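For reference, the initial values contributed by this step depend only on the prompt flag and the resource's summary fields. `getInitialValues` is module-private, so the calls below are purely illustrative, with values borrowed from the tests earlier in this diff:

```js
// With the credential prompt disabled the step contributes nothing.
getInitialValues({ ask_credential_on_launch: false }, {}); // => {}

// With the prompt enabled, defaults come from the resource's summary fields.
getInitialValues(
  { ask_credential_on_launch: true },
  { summary_fields: { credentials: [{ id: 1 }] } }
); // => { credentials: [{ id: 1 }] }
```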
|
||||
|
||||
@@ -6,35 +6,44 @@ import StepName from './StepName';
|
||||
|
||||
const STEP_ID = 'inventory';
|
||||
|
||||
export default function useInventoryStep(config, visitedSteps, i18n) {
|
||||
const [, meta] = useField('inventory');
|
||||
export default function useInventoryStep(
|
||||
launchConfig,
|
||||
resource,
|
||||
i18n,
|
||||
visitedSteps
|
||||
) {
|
||||
const [, meta, helpers] = useField('inventory');
|
||||
const formError =
|
||||
!resource || resource?.type === 'workflow_job_template'
|
||||
? false
|
||||
: Object.keys(visitedSteps).includes(STEP_ID) &&
|
||||
meta.touched &&
|
||||
!meta.value;
|
||||
|
||||
return {
|
||||
step: getStep(config, meta, i18n, visitedSteps),
|
||||
step: getStep(launchConfig, i18n, formError),
|
||||
initialValues: getInitialValues(launchConfig, resource),
|
||||
isReady: true,
|
||||
contentError: null,
|
||||
formError: !meta.value,
|
||||
setTouched: setFieldsTouched => {
|
||||
setFieldsTouched({
|
||||
inventory: true,
|
||||
});
|
||||
hasError: launchConfig.ask_inventory_on_launch && formError,
|
||||
setTouched: setFieldTouched => {
|
||||
setFieldTouched('inventory', true, false);
|
||||
},
|
||||
validate: () => {
|
||||
if (meta.touched && !meta.value && resource.type === 'job_template') {
|
||||
helpers.setError(i18n._(t`An inventory must be selected`));
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
function getStep(config, meta, i18n, visitedSteps) {
|
||||
if (!config.ask_inventory_on_launch) {
|
||||
function getStep(launchConfig, i18n, formError) {
|
||||
if (!launchConfig.ask_inventory_on_launch) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
id: STEP_ID,
|
||||
key: 3,
|
||||
name: (
|
||||
<StepName
|
||||
hasErrors={
|
||||
Object.keys(visitedSteps).includes(STEP_ID) &&
|
||||
(!meta.value || meta.error)
|
||||
}
|
||||
>
|
||||
<StepName hasErrors={formError} id="inventory-step">
|
||||
{i18n._(t`Inventory`)}
|
||||
</StepName>
|
||||
),
|
||||
@@ -42,3 +51,13 @@ function getStep(config, meta, i18n, visitedSteps) {
|
||||
enableNext: true,
|
||||
};
|
||||
}
|
||||
|
||||
function getInitialValues(launchConfig, resource) {
|
||||
if (!launchConfig.ask_inventory_on_launch) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return {
|
||||
inventory: resource?.summary_fields?.inventory || null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,49 +1,103 @@
import React from 'react';
import { t } from '@lingui/macro';
import { jsonToYaml, parseVariableField } from '../../../util/yaml';
import OtherPromptsStep from './OtherPromptsStep';
import StepName from './StepName';

const STEP_ID = 'other';

export default function useOtherPrompt(config, i18n) {
const getVariablesData = resource => {
  if (resource?.extra_data) {
    return jsonToYaml(JSON.stringify(resource.extra_data));
  }
  if (resource?.extra_vars && resource?.extra_vars !== '---') {
    return jsonToYaml(JSON.stringify(parseVariableField(resource.extra_vars)));
  }
  return '---';
};

export default function useOtherPromptsStep(launchConfig, resource, i18n) {
  return {
    step: getStep(config, i18n),
    step: getStep(launchConfig, i18n),
    initialValues: getInitialValues(launchConfig, resource),
    isReady: true,
    contentError: null,
    formError: null,
    setTouched: setFieldsTouched => {
      setFieldsTouched({
        job_type: true,
        limit: true,
        verbosity: true,
        diff_mode: true,
        job_tags: true,
        skip_tags: true,
        extra_vars: true,
      });
    hasError: false,
    setTouched: setFieldTouched => {
      [
        'job_type',
        'limit',
        'verbosity',
        'diff_mode',
        'job_tags',
        'skip_tags',
        'extra_vars',
      ].forEach(field => setFieldTouched(field, true, false));
    },
    validate: () => {},
  };
}

function getStep(config, i18n) {
  if (!shouldShowPrompt(config)) {
function getStep(launchConfig, i18n) {
  if (!shouldShowPrompt(launchConfig)) {
    return null;
  }
  return {
    id: STEP_ID,
    name: i18n._(t`Other Prompts`),
    component: <OtherPromptsStep config={config} i18n={i18n} />,
    key: 5,
    name: (
      <StepName hasErrors={false} id="other-prompts-step">
        {i18n._(t`Other prompts`)}
      </StepName>
    ),
    component: <OtherPromptsStep launchConfig={launchConfig} i18n={i18n} />,
    enableNext: true,
  };
}

function shouldShowPrompt(config) {
function shouldShowPrompt(launchConfig) {
  return (
    config.ask_job_type_on_launch ||
    config.ask_limit_on_launch ||
    config.ask_verbosity_on_launch ||
    config.ask_tags_on_launch ||
    config.ask_skip_tags_on_launch ||
    config.ask_variables_on_launch ||
    config.ask_scm_branch_on_launch ||
    config.ask_diff_mode_on_launch
    launchConfig.ask_job_type_on_launch ||
    launchConfig.ask_limit_on_launch ||
    launchConfig.ask_verbosity_on_launch ||
    launchConfig.ask_tags_on_launch ||
    launchConfig.ask_skip_tags_on_launch ||
    launchConfig.ask_variables_on_launch ||
    launchConfig.ask_scm_branch_on_launch ||
    launchConfig.ask_diff_mode_on_launch
  );
}

function getInitialValues(launchConfig, resource) {
  const initialValues = {};

  if (!launchConfig) {
    return initialValues;
  }

  if (launchConfig.ask_job_type_on_launch) {
    initialValues.job_type = resource?.job_type || '';
  }
  if (launchConfig.ask_limit_on_launch) {
    initialValues.limit = resource?.limit || '';
  }
  if (launchConfig.ask_verbosity_on_launch) {
    initialValues.verbosity = resource?.verbosity || 0;
  }
  if (launchConfig.ask_tags_on_launch) {
    initialValues.job_tags = resource?.job_tags || '';
  }
  if (launchConfig.ask_skip_tags_on_launch) {
    initialValues.skip_tags = resource?.skip_tags || '';
  }
  if (launchConfig.ask_variables_on_launch) {
    initialValues.extra_vars = getVariablesData(resource);
  }
  if (launchConfig.ask_scm_branch_on_launch) {
    initialValues.scm_branch = resource?.scm_branch || '';
  }
  if (launchConfig.ask_diff_mode_on_launch) {
    initialValues.diff_mode = resource?.diff_mode || false;
  }
  return initialValues;
}

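In the other-prompts hunk, the new `getVariablesData` prefers saved `extra_data`, falls back to a non-empty `extra_vars` string, and otherwise returns `'---'`. A standalone sketch of that precedence; `pickVariablesData` is a stand-in name, and `JSON.stringify` replaces the project's `jsonToYaml` helper purely so the snippet runs on its own:

```javascript
// Sketch only: mirrors the extra_vars precedence used by getVariablesData above.
// JSON.stringify stands in for the project's jsonToYaml helper; the real code
// converts the prompt data to YAML.
function pickVariablesData(resource) {
  if (resource?.extra_data) {
    return JSON.stringify(resource.extra_data); // saved prompt data wins
  }
  if (resource?.extra_vars && resource?.extra_vars !== '---') {
    return resource.extra_vars; // fall back to a non-empty extra_vars string
  }
  return '---'; // nothing to prefill
}

console.log(pickVariablesData({ extra_data: { region: 'us-east-1' } })); // '{"region":"us-east-1"}'
console.log(pickVariablesData({ extra_vars: 'region: us-east-1' }));     // 'region: us-east-1'
console.log(pickVariablesData({ extra_vars: '---' }));                   // '---'
```
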
@@ -1,55 +1,43 @@
import React from 'react';
import { useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import PreviewStep from './PreviewStep';
import StepName from './StepName';

const STEP_ID = 'preview';

export default function usePreviewStep(
  config,
  launchConfig,
  i18n,
  resource,
  survey,
  surveyConfig,
  hasErrors,
  i18n
  showStep
) {
  const { values: formikValues, errors } = useFormikContext();

  const formErrorsContent = [];
  if (config.ask_inventory_on_launch && !formikValues.inventory) {
    formErrorsContent.push({
      inventory: true,
    });
  }
  const hasSurveyError = Object.keys(errors).find(e => e.includes('survey'));
  if (
    config.survey_enabled &&
    (config.variables_needed_to_start ||
      config.variables_needed_to_start.length === 0) &&
    hasSurveyError
  ) {
    formErrorsContent.push({
      survey: true,
    });
  }

  return {
    step: {
      id: STEP_ID,
      name: i18n._(t`Preview`),
      component: (
        <PreviewStep
          config={config}
          resource={resource}
          survey={survey}
          formErrors={hasErrors}
        />
      ),
      enableNext: !hasErrors,
      nextButtonText: i18n._(t`Launch`),
    },
    step: showStep
      ? {
          id: STEP_ID,
          name: (
            <StepName hasErrors={false} id="preview-step">
              {i18n._(t`Preview`)}
            </StepName>
          ),
          component: (
            <PreviewStep
              launchConfig={launchConfig}
              resource={resource}
              surveyConfig={surveyConfig}
              formErrors={hasErrors}
            />
          ),
          enableNext: !hasErrors,
          nextButtonText: i18n._(t`Launch`),
        }
      : null,
    initialValues: {},
    isReady: true,
    error: null,
    setTouched: () => {},
    validate: () => {},
  };
}

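Each of these hooks returns a `step` object, or `null` when the launch config does not prompt for that step, and the inventory and other-prompts steps carry numeric `key` values (3 and 5) for ordering. A hedged sketch of how a wizard could collect and order them; `assembleSteps`, the sample step objects, and the `key` on the preview stand-in are assumptions for illustration only:

```javascript
// Sketch only: collect step objects shaped like the hook return values above,
// drop the ones the launch config hides, and order the rest by key.
function assembleSteps(stepHookResults) {
  return stepHookResults
    .map(result => result.step)
    .filter(step => step !== null)
    .sort((a, b) => a.key - b.key);
}

// Hypothetical hook results; only id/key/enableNext are shown for brevity.
const visibleSteps = assembleSteps([
  { step: { id: 'preview', key: 6, enableNext: false } }, // key assumed
  { step: { id: 'inventory', key: 3, enableNext: true } },
  { step: null }, // e.g. other prompts hidden because nothing is promptable
]);

console.log(visibleSteps.map(step => step.id)); // [ 'inventory', 'preview' ]
```
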
Some files were not shown because too many files have changed in this diff.