Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 03:54:44 -03:30)

Compare commits (437 Commits)
Commit SHAs in this comparison (the table's Author, Date, and message columns were not captured):

37ee95314a 28c3fa517e 3dd21d720e 9cfecb5590 2742612be9 4f4a4e2394 edd9972435
9fdec9b31b a93ee86581 020246736c 8d3ce206cd 28e27c5196 c56352daa4 5eea4e8881
58c821f3e1 5cad0d243a 0aaa2d8c8d 921feb561d 5b0bb4939f 144cffe009 af11055e5c
c0cb546c3c a800c8cd00 f8a23f20aa 46edd151e0 ba4b6bdbb7 1e24d8b5fa 41586ea3a6
ded5577832 cce5f26e34 1940c834cb 08381577f5 669d67b8fb 8a0be5b111 9e30f004d3
62bf61b2a2 f62dfe85cc 97acba8fe9 cec7cb393d e9b254b9d2 222fecc5f6 c833676863
7e9835f6ee 5940f6de2c a899a147e1 e0c8f3e541 68a0bbe125 8592bf3e39 4787e69afb
8f5afc83ce b1a90d445b 8954e6e556 7bfc99a615 f159a6508e 4d7b5adf12 6e648cf72f
24a50ea076 2d2add009b fd068695ef b19360ac9b 7c3c1f5a29 a902afcf73 501568340b
1d32917ceb 2d455800c4 37491fa4b9 f41852c3ee b565ed2077 86bafb52f6 11b1d0e84c
f47325a532 1a261782c7 5a1599b440 72248db76d 21268b779f 8926f635df e19194b883
fa1c33da7e d30ecb6fb3 8ed5964871 a989c624c7 7f01de26a1 e3b5d64aa7 eba0e4fd77
d3c80eef4d 3683dfab37 8e3931de37 29a582f869 be0a7a2aa9 d0d8d1c66c 8a8a48a4ff
b0aa795b10 017064aecf 7311ddf722 69835e9895 85960d9035 c8ceb62269 1acca459ef
ee6fda9f8a a95632c349 ed3b6385f1 3518fb0c17 1289f141d6 8464ec5c49 3bc5975b90
af7e9cb533 af2a8f9831 f99a43ffa6 262d99fde6 63f56d33aa 9cabf3ef4d 2855be9d26
2524e8af47 f957ef7249 4551859248 2a4912df3e daa312d7ee e95938715a f5d4f7858a
25e0efd0b7 47a007caee cd6d2ed53a 4de61204c4 6b21f2042b 7820517734 2ba1288284
149f8a21a6 602f2951b9 b003f42e22 2ee2cd0bd9 a79f2ff07a 75bb7cce22 52a253ad18
0f74a05fea 440691387b 27e6c2d47d 8b69b08991 8714bde1b4 28b84d0d71 c6111fface
98e8a09ad3 3f9af8fe69 dbe949a2c2 a296f64696 ee18400a33 98a4e85db4 f7f1bdf9c9
69cf915a20 9440785bdd ca7c840d8c f85bcae89f a0e31b9c01 c414fd68a0 2830cdfdeb
07e9b46643 1f01521213 8587461ac9 e54e5280f2 516a44ce73 e52cebc28e bb5136cdae
b0db2b7bec 1000dc10fb 2a4b009f04 8cdd42307c 269558876e bba680671b f70a76109c
5d54877183 f7dac8e68d 39648b4f0b b942fde59a ce82b87d9f 70391f96ae 2329c1b797
470159b4d7 e740340793 4d5507d344 d350551547 7fd79b8e54 eb12f45e8e fb047b1267
d31c528257 996d7ce054 7040fcfd88 88ca4b63e6 c0af3c537b f8afae308a 4cd0d60711
955d57bce6 589d27c88c eafb751ecc 30ea66023f 9843e21632 6002beb231 9c6e42fd1b
eeab4b90a5 7827a2aedd a7f1a36ed8 d651786206 19e4758be1 fe9de0d4cc 80147acc1c
4acdf8584b cf607691ac d7adcfb119 97d26728e4 6403895eae 8b26ff1fe6 9ddd020348
a2d1c32da3 af18aa8456 188b23e88f 63bed7a30d fd93964953 1f9f86974a 6a86af5b43
6a503e152a b7227113be 907da2ae61 6f76b15d92 9d6fbd6c78 edb4dac652 42898b94e2
943543354a 2da22ccd8a 9cab5a5046 e270a692b7 677a8dae7b 6eeb32a447 e57991d498
4242bd55c2 e8fb466f0f 869fcbf483 6abeaf2c55 f734918d3e 91f2e0c32b 88d6dd96fa
7feac5ecd6 193ec21149 14e62057da a26c0dfb8a 6b4219badb 1f598e1b12 7ddd4d74c0
6ad6f48ff0 d736adbedc c881762c97 be5d067148 189a10e35a 285e9c2f62 054de87f8e
7de8a8700c 4f7669dec1 25a1bc7a33 955ef3e9cb 0e8f2307fc bcfd2d6aa4 7e52f4682c
9c218fa5f5 508aed67de 0bf1116ef8 45df5ba9c4 b90a296d41 d40143a63d db40d550be
da661e45ae 58160b9eb4 05b28efd9c 0b433ebb1c 5b3f5bf37d 397c0092a0 362fdaeecc
606c3c3595 42705c9eb0 c2ba495824 85a1c88653 c4d704bee1 60d499e11c bb48ef40be
771ca2400a 735d44816b e346493921 bd39fab17a ce30594b30 2021c2a596 ecd1d09c9a
7dbde8d82c 4e64b17712 cc4c514103 ab8726dafa 2cefba6f96 592043fa70 59477aa221
279fe53837 bb319136e4 b0f68d97da a46462eede 646e403fbd 64c846cfc1 8e07269738
6fc815937b 014c995a8f c1bb62cc36 f5cf7c204f 6d08e21511 8b881d195d 5c9ff51248
3f64768ba8 fd24918ba8 f04e7067e8 9a91c0bfb2 c06188da56 7433aab258 37a715c680
3d9eb3b600 99511de728 82b1b85fa4 2aa29420ee 9e331fe029 591cdb6015 bc244b3600
dbe3863b04 ae021c37e3 8baa9d8458 3c888475a5 29b567d6e1 00aa1ad295 4f3213715e
0389e72197 0732795ecc a26df3135b a904aea519 6bd5053ae8 8b00b8c9c2 2b9acd78c8
d7f0642f48 8bbae0cc3a c00f1505d7 a08e6691fb 98bc499498 6d0c42a91a 79c5a62279
3bb671f3f2 0b9c5c410a d77d5a7734 0a00a3104a ab36129395 e99500cf16 299497ea12
843c22c6b1 86b49b6fe2 9489f00ca4 6d60e7dadc 346b9b9e3e 99384b1db9 d1b5a60bb9
d57258878d 48414f6dab ff0186f72b a682565758 0dee2e5973 929f4bfb81 ac474e2108
d6722c2106 6eef0b82bd fb4343d75e a867a32b4e 3060505110 5d68f796aa 15036ff970
32783f7aaf 8699a8fbc2 b4cde80fa9 eb4db4ed43 649aafb454 b6c272e946 9fe2211f82
4704e24c24 e5f293ce52 d64b898390 498c525b34 bb184f8ffb 7f537dbedf f9b8a69f7b
bc228b8d77 7710ad2e57 9f2c9b13d7 6940704deb 6b9cacb85f cfa0fdaa12 4423e6edae
13faa0ed2e 42336355bb c18aa90534 39460fb3d3 4f51c1d2c9 04ccff0e3f 2242119182
5cba34c34d 33a699b8ae 344a4bb238 0beda08cf9 2264a98c04 d19a9db523 4b76332daf
db38339179 5eddcdd5f5 3480d2da59 e60e6c7d08 55356ebb51 7f4bbbe5c5 49b1ce6e8c
caaefef900 96576b0e3d 288ce123ca 140dbbaa7d e9d11be680 d7f117e83f eef1246e0b
65e38aa37d c7b23aac9b b4ea60eb79 24c738c6d8 0c26734d7d d9b613ccb3 831bf9124f
0b31cad2db 059e744774 2b3c57755c 0eb526919f a8f56f78e9 f7ad3d78eb 5bfe89be6e
47661fad51 9a38971d47 c4e697879d
1 .gitignore (vendored)
@@ -31,6 +31,7 @@ awx/ui/templates/ui/installing.html
awx/ui_next/node_modules/
awx/ui_next/coverage/
awx/ui_next/build/locales/_build
rsyslog.pid
/tower-license
/tower-license/**
tools/prometheus/data
25 CHANGELOG.md
@@ -2,6 +2,29 @@

This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.

+ ## 11.1.0 (Apr 22, 2020)
+ - Changed rsyslogd to persist queued events to disk (to prevent a risk of out-of-memory errors) (https://github.com/ansible/awx/issues/6746)
+ - Added the ability to configure the destination and maximum disk size of rsyslogd spool (in the event of a log aggregator outage) (https://github.com/ansible/awx/pull/6763)
+ - Added the ability to discover playbooks in project clones from symlinked directories (https://github.com/ansible/awx/pull/6773)
+ - Fixed a bug that caused certain log aggregator settings to break logging integration (https://github.com/ansible/awx/issues/6760)
+ - Fixed a bug that caused playbook execution in container groups to sometimes unexpectedly deadlock (https://github.com/ansible/awx/issues/6692)
+ - Improved stability of the new redis clustering implementation (https://github.com/ansible/awx/pull/6739 https://github.com/ansible/awx/pull/6720)
+ - Improved stability of the new rsyslogd-based logging implementation (https://github.com/ansible/awx/pull/6796)

## 11.0.0 (Apr 16, 2020)
- As of AWX 11.0.0, Kubernetes-based deployments use a Deployment rather than a StatefulSet.
- Reimplemented external logging support using rsyslogd to improve reliability and address a number of issues (https://github.com/ansible/awx/issues/5155)
- Changed activity stream logs to include summary fields for related objects (https://github.com/ansible/awx/issues/1761)
- Added code to more gracefully attempt to reconnect to redis if it restarts/becomes unavailable (https://github.com/ansible/awx/pull/6670)
- Fixed a bug that caused REFRESH_TOKEN_EXPIRE_SECONDS to not properly be respected for OAuth2.0 refresh tokens generated by AWX (https://github.com/ansible/awx/issues/6630)
- Fixed a bug that broke schedules containing RRULES with very old DTSTART dates (https://github.com/ansible/awx/pull/6550)
- Fixed a bug that broke installs on older versions of Ansible packaged with certain Linux distributions (https://github.com/ansible/awx/issues/5501)
- Fixed a bug that caused the activity stream to sometimes report the incorrect actor when associating user membership on SAML login (https://github.com/ansible/awx/pull/6525)
- Fixed a bug in AWX's Grafana notification support when annotation tags are omitted (https://github.com/ansible/awx/issues/6580)
- Fixed a bug that prevented some users from searching for Source Control credentials in the AWX user interface (https://github.com/ansible/awx/issues/6600)
- Fixed a bug that prevented disassociating orphaned users from credentials (https://github.com/ansible/awx/pull/6554)
- Updated Twisted to address CVE-2020-10108 and CVE-2020-10109.

## 10.0.0 (Mar 30, 2020)
- As of AWX 10.0.0, the official AWX CLI no longer supports Python 2 (it requires at least Python 3.6) (https://github.com/ansible/awx/pull/6327)
- AWX no longer relies on RabbitMQ; Redis is added as a new dependency (https://github.com/ansible/awx/issues/5443)
@@ -95,7 +118,7 @@ This is a list of high-level changes for each release of AWX. A full list of commits
- Fixed a bug in the CLI which incorrectly parsed launch time arguments for `awx job_templates launch` and `awx workflow_job_templates launch` (https://github.com/ansible/awx/issues/5093).
- Fixed a bug that caused inventory updates using "sourced from a project" to stop working (https://github.com/ansible/awx/issues/4750).
- Fixed a bug that caused Slack notifications to sometimes show the wrong bot avatar (https://github.com/ansible/awx/pull/5125).
- - Fixed a bug that prevented the use of digits in Tower's URL settings (https://github.com/ansible/awx/issues/5081).
+ - Fixed a bug that prevented the use of digits in AWX's URL settings (https://github.com/ansible/awx/issues/5081).

## 8.0.0 (Oct 21, 2019)
@@ -215,18 +215,23 @@ Using `docker exec`, this will create a session in the running *awx* container,

If you want to start and use the development environment, you'll first need to bootstrap it by running the following command:

```bash
- (container)# /bootstrap_development.sh
+ (container)# /usr/bin/bootstrap_development.sh
```

- The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.
+ The above will do all the setup tasks, including running database migrations, so it may take a couple minutes. Once it's done it
+ will drop you back to the shell.

- Now you can start each service individually, or start all services in a pre-configured tmux session like so:
+ In order to launch all developer services:

```bash
- (container)# cd /awx_devel
- (container)# make server
+ (container)# /usr/bin/launch_awx.sh
```

+ `launch_awx.sh` also calls `bootstrap_development.sh` so if all you are doing is launching the supervisor to start all services, you don't
+ need to call `bootstrap_development.sh` first.


### Post Build Steps

Before you can log in and use the system, you will need to create an admin user. Optionally, you may also want to load some demo data.
@@ -82,7 +82,7 @@ The system that runs the AWX service will need to satisfy the following requirements:
- At least 2 cpu cores
- At least 20GB of space
- Running Docker, Openshift, or Kubernetes
- - If you choose to use an external PostgreSQL database, please note that the minimum version is 9.6+.
+ - If you choose to use an external PostgreSQL database, please note that the minimum version is 10+.

### AWX Tunables
@@ -477,7 +477,7 @@ Before starting the install process, review the [inventory](./installer/inventory) file

*ssl_certificate*

- > Optionally, provide the path to a file that contains a certificate and its private key.
+ > Optionally, provide the path to a file that contains a certificate and its private key. This needs to be a .pem-file
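Since the new wording asks for the certificate and its key combined in a single .pem, one way to produce such a file is simply to concatenate the two; a minimal sketch (the file names here are placeholders, not from the repo):

```python
from pathlib import Path

# Hypothetical input files; substitute your own certificate and key.
cert = Path("awx.crt").read_text()
key = Path("awx.key").read_text()

# A combined .pem is just the certificate block followed by the key block.
Path("awx.pem").write_text(cert + key)
```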
*docker_compose_dir*
21 Makefile
@@ -18,7 +18,6 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
COMPOSE_HOST ?= $(shell hostname)

VENV_BASE ?= /venv
- COLLECTION_VENV ?= /awx_devel/awx_collection_test_venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

@@ -365,11 +364,6 @@ test:
	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py2,py3
	awx-manage check_migrations --dry-run --check -n 'vNNN_missing_migration_file'

- prepare_collection_venv:
- 	rm -rf $(COLLECTION_VENV)
- 	mkdir $(COLLECTION_VENV)
- 	$(VENV_BASE)/awx/bin/pip install --target=$(COLLECTION_VENV) git+https://github.com/ansible/tower-cli.git

COLLECTION_TEST_DIRS ?= awx_collection/test/awx
COLLECTION_TEST_TARGET ?=
COLLECTION_PACKAGE ?= awx
@@ -380,12 +374,12 @@ test_collection:
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
- 	PYTHONPATH=$(COLLECTION_VENV):$PYTHONPATH:/usr/lib/python3.6/site-packages py.test $(COLLECTION_TEST_DIRS)
+ 	PYTHONPATH=$PYTHONPATH:/usr/lib/python3.6/site-packages py.test $(COLLECTION_TEST_DIRS)

flake8_collection:
	flake8 awx_collection/  # Different settings, in main exclude list

- test_collection_all: prepare_collection_venv test_collection flake8_collection
+ test_collection_all: test_collection flake8_collection

# WARNING: symlinking a collection is fundamentally unstable
# this is for rapid development iteration with playbooks, do not use with other test targets
@@ -650,7 +644,6 @@ detect-schema-change: genschema
	diff -u -b reference-schema.json schema.json

docker-compose-clean: awx/projects
- 	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
	cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf

docker-compose-build: awx-devel-build

@@ -668,11 +661,12 @@ docker-compose-isolated-build: awx-devel-build
	docker tag ansible/awx_isolated $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)
	#docker push $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)

MACHINE?=default
docker-clean:
	eval $$(docker-machine env $(MACHINE))
	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq),docker stop $(container_id); docker rm -f $(container_id);)
- 	-docker images | grep "awx_devel" | awk '{print $$1 ":" $$2}' | xargs docker rmi
+ 	docker images | grep "awx_devel" | awk '{print $$1 ":" $$2}' | xargs docker rmi

docker-clean-volumes: docker-compose-clean
	docker volume rm tools_awx_db

docker-refresh: docker-clean docker-compose

@@ -686,9 +680,6 @@ docker-compose-cluster-elk: docker-auth awx/projects
prometheus:
	docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml

- minishift-dev:
- 	ansible-playbook -i localhost, -e devtree_directory=$(CURDIR) tools/clusterdevel/start_minishift_dev.yml

clean-elk:
	docker stop tools_kibana_1
	docker stop tools_logstash_1
@@ -45,7 +45,10 @@ from awx.main.utils import (
    get_search_fields,
    getattrd,
    get_object_or_400,
-   decrypt_field
+   decrypt_field,
+   get_awx_version,
+   get_licenser,
+   StubLicense
)
from awx.main.utils.db import get_all_field_names
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
@@ -197,6 +200,8 @@ class APIView(views.APIView):
            logger.warning(status_msg)
        response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
        time_started = getattr(self, 'time_started', None)
+       response['X-API-Product-Version'] = get_awx_version()
+       response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), StubLicense) else 'Red Hat Ansible Tower'
        response['X-API-Node'] = settings.CLUSTER_HOST_ID
        if time_started:
            time_elapsed = time.time() - self.time_started
@@ -2,6 +2,7 @@
# All Rights Reserved.

from collections import OrderedDict
+ from uuid import UUID

# Django
from django.core.exceptions import PermissionDenied
@@ -86,6 +87,8 @@ class Metadata(metadata.SimpleMetadata):
        # FIXME: Still isn't showing all default values?
        try:
            default = field.get_default()
+           if type(default) is UUID:
+               default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
            if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
                default = '{}://{}'.format(self.request.scheme, self.request.get_host())
            field_info['default'] = default
@@ -2034,11 +2034,6 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptions):
        res['credentials'] = self.reverse('api:inventory_source_credentials_list', kwargs={'pk': obj.pk})
        return res

-   def get_group(self, obj):  # TODO: remove in 3.3
-       if obj.deprecated_group:
-           return obj.deprecated_group.id
-       return None

    def build_relational_field(self, field_name, relation_info):
        field_class, field_kwargs = super(InventorySourceSerializer, self).build_relational_field(field_name, relation_info)
        # SCM Project and inventory are read-only unless creating a new inventory.
@@ -3616,9 +3611,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
        elif self.instance:
            ujt = self.instance.unified_job_template
        if ujt is None:
-           if 'workflow_job_template' in attrs:
-               return {'workflow_job_template': attrs['workflow_job_template']}
-           return {}
+           ret = {}
+           for fd in ('workflow_job_template', 'identifier'):
+               if fd in attrs:
+                   ret[fd] = attrs[fd]
+           return ret

        # build additional field survey_passwords to track redacted variables
        password_dict = {}
@@ -3671,7 +3668,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
                attrs.get('survey_passwords', {}).pop(key, None)
            else:
                errors.setdefault('extra_vars', []).append(
-                   _('"$encrypted$ is a reserved keyword, may not be used for {var_name}."'.format(key))
+                   _('"$encrypted$ is a reserved keyword, may not be used for {}."'.format(key))
                )

        # Launch configs call extra_vars extra_data for historical reasons
@@ -4539,6 +4536,8 @@ class SchedulePreviewSerializer(BaseSerializer):
        try:
            Schedule.rrulestr(rrule_value)
        except Exception as e:
+           import traceback
+           logger.error(traceback.format_exc())
            raise serializers.ValidationError(_("rrule parsing failed validation: {}").format(e))
        return value
@@ -1 +1,2 @@
# Test Logging Configuration
@@ -1,11 +1,15 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
+ from datetime import timedelta

+ from django.utils.timezone import now
+ from django.conf import settings
from django.conf.urls import url

from oauthlib import oauth2
from oauth2_provider import views

+ from awx.main.models import RefreshToken
from awx.api.views import (
    ApiOAuthAuthorizationRootView,
)
@@ -14,6 +18,21 @@ from awx.api.views import (
class TokenView(views.TokenView):

    def create_token_response(self, request):
+       # Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
+       # properly expired (ugh):
+       #
+       # https://github.com/jazzband/django-oauth-toolkit/issues/746
+       #
+       # This code detects and auto-expires them on refresh grant
+       # requests.
+       if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
+           refresh_token = RefreshToken.objects.filter(
+               token=request.POST['refresh_token']
+           ).first()
+           if refresh_token:
+               expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
+               if refresh_token.created + timedelta(seconds=expire_seconds) < now():
+                   return request.build_absolute_uri(), {}, 'The refresh token has expired.', '403'
        try:
            return super(TokenView, self).create_token_response(request)
        except oauth2.AccessDeniedError as e:
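For reference, the expiry window this new check consults comes from Django settings; a sketch of what that configuration might look like (the key is the one read in the diff above, the value is an arbitrary example):

```python
# settings.py (illustrative excerpt)
OAUTH2_PROVIDER = {
    # Refresh tokens older than this many seconds are rejected by
    # TokenView.create_token_response above; the code falls back to 0,
    # which effectively treats every refresh token as already expired.
    'REFRESH_TOKEN_EXPIRE_SECONDS': 14 * 24 * 3600,  # 14 days
}
```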
@@ -1092,7 +1092,7 @@ class UserRolesList(SubListAttachDetachAPIView):

        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
-           if role.content_object.organization and user not in role.content_object.organization.member_role:
+           if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
                return Response(data, status=status.HTTP_400_BAD_REQUEST)

@@ -4415,7 +4415,7 @@ class RoleUsersList(SubListAttachDetachAPIView):

        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
-           if role.content_object.organization and user not in role.content_object.organization.member_role:
+           if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
                return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -172,9 +172,9 @@ class URLField(CharField):
            netloc = '{}:{}'.format(netloc, url_parts.port)
        if url_parts.username:
            if url_parts.password:
-               netloc = '{}:{}@{}' % (url_parts.username, url_parts.password, netloc)
+               netloc = '{}:{}@{}'.format(url_parts.username, url_parts.password, netloc)
            else:
-               netloc = '{}@{}' % (url_parts.username, netloc)
+               netloc = '{}@{}'.format(url_parts.username, netloc)
        value = urlparse.urlunsplit([url_parts.scheme, netloc, url_parts.path, url_parts.query, url_parts.fragment])
    except Exception:
        raise  # If something fails here, just fall through and let the validators check it.
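The bug being fixed is easy to reproduce: `%` is printf-style interpolation and does nothing useful with `{}` placeholders, so the old code raised at runtime. A quick demonstration:

```python
# The old code applied printf-style '%' to a string with '{}' placeholders;
# since the template contains no '%s' conversions, Python raises TypeError.
try:
    '{}:{}@{}' % ('user', 'secret', 'example.org:443')
except TypeError as e:
    print(e)  # not all arguments converted during string formatting

# The corrected .format() call interpolates as intended:
print('{}:{}@{}'.format('user', 'secret', 'example.org:443'))
# -> user:secret@example.org:443
```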
@@ -410,7 +410,7 @@ class SettingsWrapper(UserSettingsHolder):
        field = self.registry.get_setting_field(name)
        if field.read_only:
            logger.warning('Attempt to set read only setting "%s".', name)
-           raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
+           raise ImproperlyConfigured('Setting "{}" is read only.'.format(name))

        try:
            data = field.to_representation(value)
@@ -441,7 +441,7 @@ class SettingsWrapper(UserSettingsHolder):
        field = self.registry.get_setting_field(name)
        if field.read_only:
            logger.warning('Attempt to delete read only setting "%s".', name)
-           raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
+           raise ImproperlyConfigured('Setting "{}" is read only.'.format(name))
        for setting in Setting.objects.filter(key=name, user__isnull=True):
            setting.delete()
        # pre_delete handler will delete from cache.
@@ -325,17 +325,3 @@ def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting
    )
    assert response.data['FOO_BAR'] == 23


- @pytest.mark.django_db
- def test_setting_logging_test(api_request):
-     with mock.patch('awx.conf.views.AWXProxyHandler.perform_test') as mock_func:
-         api_request(
-             'post',
-             reverse('api:setting_logging_test'),
-             data={'LOG_AGGREGATOR_HOST': 'http://foobar', 'LOG_AGGREGATOR_TYPE': 'logstash'}
-         )
-     call = mock_func.call_args_list[0]
-     args, kwargs = call
-     given_settings = kwargs['custom_settings']
-     assert given_settings.LOG_AGGREGATOR_HOST == 'http://foobar'
-     assert given_settings.LOG_AGGREGATOR_TYPE == 'logstash'
@@ -3,7 +3,11 @@

# Python
import collections
import logging
+ import subprocess
+ import sys
+ import socket
+ from socket import SHUT_RDWR

# Django
from django.conf import settings
@@ -11,7 +15,7 @@ from django.http import Http404
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
- from rest_framework.exceptions import PermissionDenied, ValidationError
+ from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework import serializers
from rest_framework import status
@@ -26,7 +30,6 @@ from awx.api.generics import (
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
- from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.models import Setting
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
@@ -161,40 +164,47 @@ class SettingLoggingTest(GenericAPIView):
    filter_backends = []

    def post(self, request, *args, **kwargs):
        defaults = dict()
        for key in settings_registry.get_registered_settings(category_slug='logging'):
            try:
                defaults[key] = settings_registry.get_setting_field(key).get_default()
            except serializers.SkipField:
                defaults[key] = None
        obj = type('Settings', (object,), defaults)()
        serializer = self.get_serializer(obj, data=request.data)
        serializer.is_valid(raise_exception=True)
        # Special validation specific to logging test.
        errors = {}
        for key in ['LOG_AGGREGATOR_TYPE', 'LOG_AGGREGATOR_HOST']:
            if not request.data.get(key, ''):
                errors[key] = 'This field is required.'
        if errors:
            raise ValidationError(errors)

        if request.data.get('LOG_AGGREGATOR_PASSWORD', '').startswith('$encrypted$'):
            serializer.validated_data['LOG_AGGREGATOR_PASSWORD'] = getattr(
                settings, 'LOG_AGGREGATOR_PASSWORD', ''
            )
        # Error if logging is not enabled
        enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
        if not enabled:
            return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)

        # Send test message to configured logger based on db settings
        logging.getLogger('awx').error('AWX Connection Test Message')

        hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
        protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)

        try:
            class MockSettings:
                pass
            mock_settings = MockSettings()
            for k, v in serializer.validated_data.items():
                setattr(mock_settings, k, v)
            AWXProxyHandler().perform_test(custom_settings=mock_settings)
            if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
                return Response(status=status.HTTP_201_CREATED)
        except LoggingConnectivityException as e:
            return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(status=status.HTTP_200_OK)
            subprocess.check_output(
                ['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
                stderr=subprocess.STDOUT
            )
        except subprocess.CalledProcessError as exc:
            return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)

        # Check to ensure port is open at host
        if protocol in ['udp', 'tcp']:
            port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
            # Error if port is not set when using UDP/TCP
            if not port:
                return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
        else:
            # if http/https by this point, domain is reacheable
            return Response(status=status.HTTP_202_ACCEPTED)

        if protocol == 'udp':
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        else:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.settimeout(.5)
            s.connect((hostname, int(port)))
            s.shutdown(SHUT_RDWR)
            s.close()
            return Response(status=status.HTTP_202_ACCEPTED)
        except Exception as e:
            return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)


# Create view functions for all of the class-based views to simplify inclusion
@@ -11,7 +11,6 @@ from functools import reduce
from django.conf import settings
from django.db.models import Q, Prefetch
- from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist

@@ -405,14 +404,6 @@ class BaseAccess(object):
                # Cannot copy manual project without errors
                user_capabilities[display_method] = False
                continue
-           elif display_method in ['start', 'schedule'] and isinstance(obj, Group):  # TODO: remove in 3.3
-               try:
-                   if obj.deprecated_inventory_source and not obj.deprecated_inventory_source._can_update():
-                       user_capabilities[display_method] = False
-                       continue
-               except Group.deprecated_inventory_source.RelatedObjectDoesNotExist:
-                   user_capabilities[display_method] = False
-                   continue
            elif display_method in ['start', 'schedule'] and isinstance(obj, (Project)):
                if obj.scm_type == '':
                    user_capabilities[display_method] = False
@@ -650,8 +641,8 @@ class UserAccess(BaseAccess):
                # in these cases only superusers can modify orphan users
                return False
            return not obj.roles.all().exclude(
                content_type=ContentType.objects.get_for_model(User)
            ).filter(ancestors__in=self.user.roles.all()).exists()
                ancestors__in=self.user.roles.all()
            ).exists()
        else:
            return self.is_all_org_admin(obj)

@@ -1434,7 +1425,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
        Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
        '''
        if not data:  # So the browseable API will work
-           return Organization.accessible_objects(self.user, 'job_template_admin_role').exists()
+           return Project.accessible_objects(self.user, 'use_role').exists()

        # if reference_obj is provided, determine if it can be copied
        reference_obj = data.get('reference_obj', None)
@@ -1503,11 +1494,6 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
        if data is None:
            return True

-       # standard type of check for organization - cannot change the value
-       # unless posessing the respective job_template_admin_role, otherwise non-blocking
-       if not self.check_related('organization', Organization, data, obj=obj, role_field='job_template_admin_role'):
-           return False

        data = dict(data)

        if self.changes_are_non_sensitive(obj, data):
@@ -11,6 +11,7 @@ from prometheus_client import (
    Counter,
    Enum,
    CollectorRegistry,
+   parser,
)

from django.conf import settings
@@ -30,6 +31,11 @@ def now_seconds():
    return dt_to_seconds(datetime.datetime.now())


+ def safe_name(s):
+     # Replace all non alpha-numeric characters with _
+     return re.sub('[^0-9a-zA-Z]+', '_', s)
+
+
# Second granularity; Per-minute
class FixedSlidingWindow():
    def __init__(self, start_time=None):
@@ -99,7 +105,8 @@ class BroadcastWebsocketStatsManager():
        Stringified verion of all the stats
        '''
        redis_conn = redis.Redis.from_url(settings.BROKER_URL)
-       return redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME)
+       stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
+       return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))


class BroadcastWebsocketStats():
@@ -109,8 +116,8 @@ class BroadcastWebsocketStats():
        self._registry = CollectorRegistry()

        # TODO: More robust replacement
-       self.name = self.safe_name(self._local_hostname)
-       self.remote_name = self.safe_name(self._remote_hostname)
+       self.name = safe_name(self._local_hostname)
+       self.remote_name = safe_name(self._remote_hostname)

        self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total',
                                                'Number of messages received, to be forwarded, by the broadcast websocket system',
@@ -122,6 +129,7 @@ class BroadcastWebsocketStats():
                                'Websocket broadcast connection',
                                states=['disconnected', 'connected'],
                                registry=self._registry)
+       self._connection.state('disconnected')
        self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start',
                                       'Time the connection was established',
                                       registry=self._registry)
@@ -131,10 +139,6 @@ class BroadcastWebsocketStats():
                                registry=self._registry)
        self._internal_messages_received_per_minute = FixedSlidingWindow()

-   def safe_name(self, s):
-       # Replace all non alpha-numeric characters with _
-       return re.sub('[^0-9a-zA-Z]+', '_', s)

    def unregister(self):
        self._registry.unregister(f'awx_{self.remote_name}_messages_received')
        self._registry.unregister(f'awx_{self.remote_name}_connection')
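To make the promotion of `safe_name` to a module-level helper concrete, here is how it behaves on a hypothetical hostname (self-contained copy of the function from the diff above):

```python
import re

def safe_name(s):
    # Replace all non alpha-numeric characters with _
    return re.sub('[^0-9a-zA-Z]+', '_', s)

# Hostnames become legal fragments for Prometheus metric names:
print(safe_name('awx-web-01.example.com'))  # awx_web_01_example_com
```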
@@ -122,22 +122,27 @@ def cred_type_counts(since):
    return counts


- @register('inventory_counts', '1.0')
+ @register('inventory_counts', '1.2')
def inventory_counts(since):
    counts = {}
    for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
                                                                 num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
+       source_list = []
+       for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name','source', 'num_hosts'):
+           source_list.append(source)
        counts[inv.id] = {'name': inv.name,
                          'kind': inv.kind,
                          'hosts': inv.num_hosts,
-                         'sources': inv.num_sources
+                         'sources': inv.num_sources,
+                         'source_list': source_list
                          }

    for smart_inv in models.Inventory.objects.filter(kind='smart'):
        counts[smart_inv.id] = {'name': smart_inv.name,
                                'kind': smart_inv.kind,
-                               'num_hosts': smart_inv.hosts.count(),
-                               'num_sources': smart_inv.inventory_sources.count()
+                               'hosts': smart_inv.hosts.count(),
+                               'sources': 0,
+                               'source_list': []
                                }
    return counts
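For illustration, the payload produced by the 1.2 collector now looks roughly like this (the IDs, names, and counts below are invented):

```python
{
    42: {
        'name': 'Production',
        'kind': '',
        'hosts': 120,
        'sources': 2,
        'source_list': [
            {'name': 'AWS us-east', 'source': 'ec2', 'num_hosts': 80},
            {'name': 'AWS us-west', 'source': 'ec2', 'num_hosts': 40},
        ],
    },
    # Smart inventories now report zero sources and an empty source_list:
    43: {'name': 'All Linux', 'kind': 'smart', 'hosts': 95, 'sources': 0, 'source_list': []},
}
```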
@@ -222,10 +227,12 @@ def query_info(since, collection_type):


# Copies Job Events from db to a .csv to be shipped
- @table_version('events_table.csv', '1.0')
+ @table_version('events_table.csv', '1.1')
@table_version('unified_jobs_table.csv', '1.0')
@table_version('unified_job_template_table.csv', '1.0')
- def copy_tables(since, full_path):
+ @table_version('workflow_job_node_table.csv', '1.0')
+ @table_version('workflow_job_template_node_table.csv', '1.0')
+ def copy_tables(since, full_path, subset=None):
    def _copy_table(table, query, path):
        file_path = os.path.join(path, table + '_table.csv')
        file = open(file_path, 'w', encoding='utf-8')
@@ -249,10 +256,16 @@ def copy_tables(since, full_path):
                      main_jobevent.job_id,
                      main_jobevent.host_id,
                      main_jobevent.host_name
                      , CAST(main_jobevent.event_data::json->>'start' AS TIMESTAMP WITH TIME ZONE) AS start,
                      CAST(main_jobevent.event_data::json->>'end' AS TIMESTAMP WITH TIME ZONE) AS end,
                      main_jobevent.event_data::json->'duration' AS duration,
                      main_jobevent.event_data::json->'res'->'warnings' AS warnings,
                      main_jobevent.event_data::json->'res'->'deprecations' AS deprecations
                      FROM main_jobevent
                      WHERE main_jobevent.created > {}
                      ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-   _copy_table(table='events', query=events_query, path=full_path)
+   if not subset or 'events' in subset:
+       _copy_table(table='events', query=events_query, path=full_path)

    unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                     main_unifiedjob.polymorphic_ctype_id,
@@ -276,11 +289,12 @@ def copy_tables(since, full_path):
                     main_unifiedjob.instance_group_id
                     FROM main_unifiedjob
                     JOIN django_content_type ON main_unifiedjob.polymorphic_ctype_id = django_content_type.id
-                    JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id
-                    WHERE main_unifiedjob.created > {}
-                    AND main_unifiedjob.launch_type != 'sync'
+                    LEFT JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id
+                    WHERE (main_unifiedjob.created > {0} OR main_unifiedjob.finished > {0})
+                    AND main_unifiedjob.launch_type != 'sync'
                     ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-   _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
+   if not subset or 'unified_jobs' in subset:
+       _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)

    unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id,
                     main_unifiedjobtemplate.polymorphic_ctype_id,
@@ -299,6 +313,71 @@ def copy_tables(since, full_path):
                     main_unifiedjobtemplate.status
                     FROM main_unifiedjobtemplate, django_content_type
                     WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
-                    ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-   _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
+                    ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
+   if not subset or 'unified_job_template' in subset:
+       _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)

    workflow_job_node_query = '''COPY (SELECT main_workflowjobnode.id,
                     main_workflowjobnode.created,
                     main_workflowjobnode.modified,
                     main_workflowjobnode.job_id,
                     main_workflowjobnode.unified_job_template_id,
                     main_workflowjobnode.workflow_job_id,
                     main_workflowjobnode.inventory_id,
                     success_nodes.nodes AS success_nodes,
                     failure_nodes.nodes AS failure_nodes,
                     always_nodes.nodes AS always_nodes,
                     main_workflowjobnode.do_not_run,
                     main_workflowjobnode.all_parents_must_converge
                     FROM main_workflowjobnode
                     LEFT JOIN (
                         SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
                         FROM main_workflowjobnode_success_nodes
                         GROUP BY from_workflowjobnode_id
                     ) success_nodes ON main_workflowjobnode.id = success_nodes.from_workflowjobnode_id
                     LEFT JOIN (
                         SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
                         FROM main_workflowjobnode_failure_nodes
                         GROUP BY from_workflowjobnode_id
                     ) failure_nodes ON main_workflowjobnode.id = failure_nodes.from_workflowjobnode_id
                     LEFT JOIN (
                         SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
                         FROM main_workflowjobnode_always_nodes
                         GROUP BY from_workflowjobnode_id
                     ) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
                     WHERE main_workflowjobnode.modified > {}
                     ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    if not subset or 'workflow_job_node' in subset:
        _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)

    workflow_job_template_node_query = '''COPY (SELECT main_workflowjobtemplatenode.id,
                     main_workflowjobtemplatenode.created,
                     main_workflowjobtemplatenode.modified,
                     main_workflowjobtemplatenode.unified_job_template_id,
                     main_workflowjobtemplatenode.workflow_job_template_id,
                     main_workflowjobtemplatenode.inventory_id,
                     success_nodes.nodes AS success_nodes,
                     failure_nodes.nodes AS failure_nodes,
                     always_nodes.nodes AS always_nodes,
                     main_workflowjobtemplatenode.all_parents_must_converge
                     FROM main_workflowjobtemplatenode
                     LEFT JOIN (
                         SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
                         FROM main_workflowjobtemplatenode_success_nodes
                         GROUP BY from_workflowjobtemplatenode_id
                     ) success_nodes ON main_workflowjobtemplatenode.id = success_nodes.from_workflowjobtemplatenode_id
                     LEFT JOIN (
                         SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
                         FROM main_workflowjobtemplatenode_failure_nodes
                         GROUP BY from_workflowjobtemplatenode_id
                     ) failure_nodes ON main_workflowjobtemplatenode.id = failure_nodes.from_workflowjobtemplatenode_id
                     LEFT JOIN (
                         SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
                         FROM main_workflowjobtemplatenode_always_nodes
                         GROUP BY from_workflowjobtemplatenode_id
                     ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
                     ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
    if not subset or 'workflow_job_template_node' in subset:
        _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)

    return
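The new `subset` parameter lets callers export only some of the CSVs; a hypothetical invocation (the path and time window are examples, not values from the repo):

```python
from datetime import datetime, timedelta

# Export only the two new workflow tables, covering the last four weeks.
since = datetime.utcnow() - timedelta(weeks=4)
copy_tables(since, '/tmp/awx_analytics',
            subset=['workflow_job_node', 'workflow_job_template_node'])
```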
@@ -667,7 +667,7 @@ register(
    allow_blank=True,
    default='',
    label=_('Logging Aggregator Username'),
-   help_text=_('Username for external log aggregator (if required).'),
+   help_text=_('Username for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
@@ -679,7 +679,7 @@ register(
    default='',
    encrypted=True,
    label=_('Logging Aggregator Password/Token'),
-   help_text=_('Password or authentication token for external log aggregator (if required).'),
+   help_text=_('Password or authentication token for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
@@ -787,6 +787,29 @@ register(
    category=_('Logging'),
    category_slug='logging',
)
+ register(
+     'LOG_AGGREGATOR_MAX_DISK_USAGE_GB',
+     field_class=fields.IntegerField,
+     default=1,
+     min_value=1,
+     label=_('Maximum disk persistance for external log aggregation (in GB)'),
+     help_text=_('Amount of data to store (in gigabytes) during an outage of '
+                 'the external log aggregator (defaults to 1). '
+                 'Equivalent to the rsyslogd queue.maxdiskspace setting.'),
+     category=_('Logging'),
+     category_slug='logging',
+ )
+ register(
+     'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH',
+     field_class=fields.CharField,
+     default='/var/lib/awx',
+     label=_('File system location for rsyslogd disk persistence'),
+     help_text=_('Location to persist logs that should be retried after an outage '
+                 'of the external log aggregator (defaults to /var/lib/awx). '
+                 'Equivalent to the rsyslogd queue.spoolDirectory setting.'),
+     category=_('Logging'),
+     category_slug='logging',
+ )
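Assuming the two new settings are exposed through the standard logging settings endpoint like their registered siblings, adjusting them over the API might look like this (the host and credentials are placeholders):

```python
import requests

resp = requests.patch(
    'https://awx.example.com/api/v2/settings/logging/',
    auth=('admin', 'password'),
    json={
        'LOG_AGGREGATOR_MAX_DISK_USAGE_GB': 2,                # allow 2 GB of spooled events
        'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH': '/var/lib/awx'  # rsyslogd spool directory
    },
)
resp.raise_for_status()
```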

register(
@@ -38,7 +38,7 @@ ENV_BLACKLIST = frozenset((
    'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
    'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
    'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS',
-   'AWX_HOST', 'PROJECT_REVISION'
+   'AWX_HOST', 'PROJECT_REVISION', 'SUPERVISOR_WEB_CONFIG_PATH'
))

# loggers that may be called in process of emitting a log
@@ -1,6 +1,6 @@
import json
import logging
- import datetime
+ import time
import hmac
import asyncio

@@ -29,7 +29,7 @@ class WebsocketSecretAuthHelper:

    @classmethod
    def construct_secret(cls):
-       nonce_serialized = "{}".format(int((datetime.datetime.utcnow() - datetime.datetime.fromtimestamp(0)).total_seconds()))
+       nonce_serialized = f"{int(time.time())}"
        payload_dict = {
            'secret': settings.BROADCAST_WEBSOCKET_SECRET,
            'nonce': nonce_serialized
@@ -70,10 +70,12 @@ class WebsocketSecretAuthHelper:
            raise ValueError("Invalid secret")

        # Avoid timing attack and check the nonce after all the heavy lifting
-       now = datetime.datetime.utcnow()
-       nonce_parsed = datetime.datetime.fromtimestamp(int(nonce_parsed))
-       if (now - nonce_parsed).total_seconds() > nonce_tolerance:
-           raise ValueError("Potential replay attack or machine(s) time out of sync.")
+       now = int(time.time())
+       nonce_parsed = int(nonce_parsed)
+       nonce_diff = now - nonce_parsed
+       if abs(nonce_diff) > nonce_tolerance:
+           logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
+           raise ValueError("Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")

        return True
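The nonce logic above reduces to a clock-skew check on a Unix timestamp; a self-contained sketch of the same idea (the tolerance value is illustrative, not AWX's actual default):

```python
import time

def nonce_is_fresh(nonce_serialized: str, tolerance: int = 300) -> bool:
    # The nonce is int(time.time()) serialized as a string (see construct_secret);
    # reject it when the sender's clock and ours disagree by more than `tolerance`.
    return abs(int(time.time()) - int(nonce_serialized)) <= tolerance

print(nonce_is_fresh(f"{int(time.time())}"))        # True
print(nonce_is_fresh(f"{int(time.time()) - 900}"))  # False (15 minutes stale)
```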
@@ -93,19 +95,17 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer):
        try:
            WebsocketSecretAuthHelper.is_authorized(self.scope)
        except Exception:
            # TODO: log ip of connected client
-           logger.warn("Broadcast client failed to authorize.")
+           logger.warn(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.")
            await self.close()
            return

        # TODO: log ip of connected client
        logger.info(f"Broadcast client connected.")
        await self.accept()
        await self.channel_layer.group_add(settings.BROADCAST_WEBSOCKET_GROUP_NAME, self.channel_name)
        logger.info(f"client '{self.channel_name}' joined the broadcast group.")

    async def disconnect(self, code):
        # TODO: log ip of disconnected client
-       logger.info("Client disconnected")
+       logger.info("client '{self.channel_name}' disconnected from the broadcast group.")
        await self.channel_layer.group_discard(settings.BROADCAST_WEBSOCKET_GROUP_NAME, self.channel_name)

    async def internal_message(self, event):
        await self.send(event['text'])
@@ -130,6 +130,14 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
        await self.send_json({"close": True})
        await self.close()

+   async def disconnect(self, code):
+       current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else [])
+       for group_name in current_groups:
+           await self.channel_layer.group_discard(
+               group_name,
+               self.channel_name,
+           )

    @database_sync_to_async
    def user_can_see_object_id(self, user_access, oid):
        # At this point user is a channels.auth.UserLazyObject object
@@ -187,7 +195,6 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
            group_name,
            self.channel_name
        )
-       logger.debug(f"Channel {self.channel_name} left groups {old_groups} and joined {new_groups_exclusive}")
        self.scope['session']['groups'] = new_groups
        await self.send_json({
            "groups_current": list(new_groups),
@@ -213,31 +220,6 @@ def _dump_payload(payload):
    return None


- async def emit_channel_notification_async(group, payload):
-     from awx.main.wsbroadcast import wrap_broadcast_msg  # noqa
-
-     payload_dumped = _dump_payload(payload)
-     if payload_dumped is None:
-         return
-
-     channel_layer = get_channel_layer()
-     await channel_layer.group_send(
-         group,
-         {
-             "type": "internal.message",
-             "text": payload_dumped
-         },
-     )
-
-     await channel_layer.group_send(
-         settings.BROADCAST_WEBSOCKET_GROUP_NAME,
-         {
-             "type": "internal.message",
-             "text": wrap_broadcast_msg(group, payload_dumped),
-         },
-     )


def emit_channel_notification(group, payload):
    from awx.main.wsbroadcast import wrap_broadcast_msg  # noqa
@@ -22,7 +22,7 @@ class Scheduler(Scheduler):

def run():
    ppid = os.getppid()
-   logger.warn(f'periodic beat started')
+   logger.warn('periodic beat started')
    while True:
        if os.getppid() != ppid:
            # if the parent PID changes, this process has been orphaned
@@ -118,9 +118,14 @@ class AWXConsumerRedis(AWXConsumerBase):

        queue = redis.Redis.from_url(settings.BROKER_URL)
        while True:
-           res = queue.blpop(self.queues)
-           res = json.loads(res[1])
-           self.process_task(res)
+           try:
+               res = queue.blpop(self.queues)
+               res = json.loads(res[1])
+               self.process_task(res)
+           except redis.exceptions.RedisError:
+               logger.exception("encountered an error communicating with redis")
+           except (json.JSONDecodeError, KeyError):
+               logger.exception("failed to decode JSON message from redis")
            if self.should_stop:
                return
@@ -91,7 +91,7 @@ class CallbackBrokerWorker(BaseWorker):
        for e in events:
            try:
                if (
-                   isinstance(exc, IntegrityError),
+                   isinstance(exc, IntegrityError) and
                    getattr(e, 'host_id', '')
                ):
                    # this is one potential IntegrityError we can
@@ -268,13 +268,6 @@ class IsolatedManager(object):
        # in the final sync
        self.consume_events()

-       # emit an EOF event
-       event_data = {
-           'event': 'EOF',
-           'final_counter': len(self.handled_events)
-       }
-       self.event_handler(event_data)

        return status, rc

    def consume_events(self):
@@ -287,7 +280,7 @@ class IsolatedManager(object):
        if os.path.exists(events_path):
            for event in set(os.listdir(events_path)) - self.handled_events:
                path = os.path.join(events_path, event)
-               if os.path.exists(path):
+               if os.path.exists(path) and os.path.isfile(path):
                    try:
                        event_data = json.load(
                            open(os.path.join(events_path, event), 'r')
@@ -420,8 +413,4 @@ class IsolatedManager(object):
        status, rc = self.dispatch(playbook, module, module_args)
        if status == 'successful':
            status, rc = self.check()
-       else:
-           # emit an EOF event
-           event_data = {'event': 'EOF', 'final_counter': 0}
-           self.event_handler(event_data)
        return status, rc
@@ -496,12 +496,6 @@ class Command(BaseCommand):
|
||||
group_names = all_group_names[offset:(offset + self._batch_size)]
|
||||
for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
|
||||
del_group_pks.discard(group_pk)
|
||||
if self.inventory_source.deprecated_group_id in del_group_pks: # TODO: remove in 3.3
|
||||
logger.warning(
|
||||
'Group "%s" from v1 API is not deleted by overwrite',
|
||||
self.inventory_source.deprecated_group.name
|
||||
)
|
||||
del_group_pks.discard(self.inventory_source.deprecated_group_id)
|
||||
# Now delete all remaining groups in batches.
|
||||
all_del_pks = sorted(list(del_group_pks))
|
||||
for offset in range(0, len(all_del_pks), self._batch_size):
|
||||
@@ -534,12 +528,6 @@ class Command(BaseCommand):
|
||||
# Set of all host pks managed by this inventory source
|
||||
all_source_host_pks = self._existing_host_pks()
|
||||
for db_group in db_groups.all():
|
||||
if self.inventory_source.deprecated_group_id == db_group.id: # TODO: remove in 3.3
|
||||
logger.debug(
|
||||
'Group "%s" from v1 API child group/host connections preserved',
|
||||
db_group.name
|
||||
)
|
||||
continue
|
||||
# Delete child group relationships not present in imported data.
|
||||
db_children = db_group.children
|
||||
db_children_name_pk_map = dict(db_children.values_list('name', 'pk'))
|
||||
|
@@ -7,7 +7,6 @@ from django.core.cache import cache as django_cache
 from django.core.management.base import BaseCommand
 from django.db import connection as django_connection
 
-from awx.main.utils.handlers import AWXProxyHandler
 from awx.main.dispatch import get_local_queuename, reaper
 from awx.main.dispatch.control import Control
 from awx.main.dispatch.pool import AutoscalePool
@@ -56,11 +55,6 @@ class Command(BaseCommand):
         reaper.reap()
         consumer = None
 
-        # don't ship external logs inside the dispatcher's parent process
-        # this exists to work around a race condition + deadlock bug on fork
-        # in cpython itself:
-        # https://bugs.python.org/issue37429
-        AWXProxyHandler.disable()
         try:
             queues = ['tower_broadcast_all', get_local_queuename()]
             consumer = AWXConsumerPG(
@@ -2,10 +2,20 @@
 # All Rights Reserved.
 import logging
 import asyncio
+import datetime
+import re
+
+import redis
+from datetime import datetime as dt
 
 from django.core.management.base import BaseCommand
+from django.db.models import Q
 
+from awx.main.analytics.broadcast_websocket import (
+    BroadcastWebsocketStatsManager,
+    safe_name,
+)
 from awx.main.wsbroadcast import BroadcastWebsocketManager
+from awx.main.models.ha import Instance
 
 
 logger = logging.getLogger('awx.main.wsbroadcast')
@@ -14,7 +24,106 @@ logger = logging.getLogger('awx.main.wsbroadcast')
 class Command(BaseCommand):
     help = 'Launch the websocket broadcaster'
 
+    def add_arguments(self, parser):
+        parser.add_argument('--status', dest='status', action='store_true',
+                            help='print the internal state of any running broadcast websocket')
+
+    @classmethod
+    def display_len(cls, s):
+        return len(re.sub('\x1b.*?m', '', s))
+
+    @classmethod
+    def _format_lines(cls, host_stats, padding=5):
+        widths = [0 for i in host_stats[0]]
+        for entry in host_stats:
+            for i, e in enumerate(entry):
+                if Command.display_len(e) > widths[i]:
+                    widths[i] = Command.display_len(e)
+        paddings = [padding for i in widths]
+
+        lines = []
+        for entry in host_stats:
+            line = ""
+            for pad, width, value in zip(paddings, widths, entry):
+                if len(value) > Command.display_len(value):
+                    width += len(value) - Command.display_len(value)
+                total_width = width + pad
+                line += f'{value:{total_width}}'
+            lines.append(line)
+        return lines
+
+    @classmethod
+    def get_connection_status(cls, me, hostnames, data):
+        host_stats = [('hostname', 'state', 'start time', 'duration (sec)')]
+        for h in hostnames:
+            connection_color = '91'  # red
+            h_safe = safe_name(h)
+            prefix = f'awx_{h_safe}'
+            connection_state = data.get(f'{prefix}_connection', 'N/A')
+            connection_started = 'N/A'
+            connection_duration = 'N/A'
+            if connection_state is None:
+                connection_state = 'unknown'
+            if connection_state == 'connected':
+                connection_color = '92'  # green
+                connection_started = data.get(f'{prefix}_connection_start', 'Error')
+                if connection_started != 'Error':
+                    connection_started = datetime.datetime.fromtimestamp(connection_started)
+                    connection_duration = int((dt.now() - connection_started).total_seconds())
+
+            connection_state = f'\033[{connection_color}m{connection_state}\033[0m'
+
+            host_stats.append((h, connection_state, str(connection_started), str(connection_duration)))
+
+        return host_stats
+
+    @classmethod
+    def get_connection_stats(cls, me, hostnames, data):
+        host_stats = [('hostname', 'total', 'per minute')]
+        for h in hostnames:
+            h_safe = safe_name(h)
+            prefix = f'awx_{h_safe}'
+            messages_total = data.get(f'{prefix}_messages_received', '0')
+            messages_per_minute = data.get(f'{prefix}_messages_received_per_minute', '0')
+
+            host_stats.append((h, str(int(messages_total)), str(int(messages_per_minute))))
+
+        return host_stats
+
     def handle(self, *arg, **options):
+        if options.get('status'):
+            try:
+                stats_all = BroadcastWebsocketStatsManager.get_stats_sync()
+            except redis.exceptions.ConnectionError as e:
+                print(f"Unable to get Broadcast Websocket Status. Failed to connect to redis {e}")
+                return
+
+            data = {}
+            for family in stats_all:
+                if family.type == 'gauge' and len(family.samples) > 1:
+                    for sample in family.samples:
+                        if sample.value >= 1:
+                            data[family.name] = sample.labels[family.name]
+                            break
+                else:
+                    data[family.name] = family.samples[0].value
+            me = Instance.objects.me()
+            hostnames = [i.hostname for i in Instance.objects.exclude(Q(hostname=me.hostname) | Q(rampart_groups__controller__isnull=False))]
+
+            host_stats = Command.get_connection_status(me, hostnames, data)
+            lines = Command._format_lines(host_stats)
+
+            print(f'Broadcast websocket connection status from "{me.hostname}" to:')
+            print('\n'.join(lines))
+
+            host_stats = Command.get_connection_stats(me, hostnames, data)
+            lines = Command._format_lines(host_stats)
+
+            print(f'\nBroadcast websocket connection stats from "{me.hostname}" to:')
+            print('\n'.join(lines))
+
+            return
+
         try:
             broadcast_websocket_mgr = BroadcastWebsocketManager()
             task = broadcast_websocket_mgr.start()
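The display_len() helper above exists because str.format pads based on len(), which counts the invisible ANSI color escapes emitted for the colored 'state' column. For illustration:

    import re

    plain = 'connected'
    green = '\x1b[92mconnected\x1b[0m'
    print(len(plain))                          # 9
    print(len(green))                          # 18 -- escape bytes counted
    print(len(re.sub('\x1b.*?m', '', green)))  # 9  -- actual display width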
@@ -12,23 +12,19 @@ import urllib.parse
 
 from django.conf import settings
-from django.contrib.auth.models import User
-from django.db.models.signals import post_save
 from django.db.migrations.executor import MigrationExecutor
-from django.db import IntegrityError, connection
-from django.utils.functional import curry
+from django.db import connection
 from django.shortcuts import get_object_or_404, redirect
 from django.apps import apps
 from django.utils.deprecation import MiddlewareMixin
 from django.utils.translation import ugettext_lazy as _
 from django.urls import reverse, resolve
 
 from awx.main.models import ActivityStream
 from awx.main.utils.named_url_graph import generate_graph, GraphNode
 from awx.conf import fields, register
 
 
 logger = logging.getLogger('awx.main.middleware')
 analytics_logger = logging.getLogger('awx.analytics.activity_stream')
 perf_logger = logging.getLogger('awx.analytics.performance')
@@ -76,61 +72,6 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
         return filepath
 
 
-class ActivityStreamMiddleware(threading.local, MiddlewareMixin):
-
-    def __init__(self, get_response=None):
-        self.disp_uid = None
-        self.instance_ids = []
-        super().__init__(get_response)
-
-    def process_request(self, request):
-        if hasattr(request, 'user') and request.user.is_authenticated:
-            user = request.user
-        else:
-            user = None
-
-        set_actor = curry(self.set_actor, user)
-        self.disp_uid = str(uuid.uuid1())
-        self.instance_ids = []
-        post_save.connect(set_actor, sender=ActivityStream, dispatch_uid=self.disp_uid, weak=False)
-
-    def process_response(self, request, response):
-        drf_request = getattr(request, 'drf_request', None)
-        drf_user = getattr(drf_request, 'user', None)
-        if self.disp_uid is not None:
-            post_save.disconnect(dispatch_uid=self.disp_uid)
-
-        for instance in ActivityStream.objects.filter(id__in=self.instance_ids):
-            if drf_user and drf_user.id:
-                instance.actor = drf_user
-                try:
-                    instance.save(update_fields=['actor'])
-                    analytics_logger.info('Activity Stream update entry for %s' % str(instance.object1),
-                                          extra=dict(changes=instance.changes, relationship=instance.object_relationship_type,
-                                                     actor=drf_user.username, operation=instance.operation,
-                                                     object1=instance.object1, object2=instance.object2))
-                except IntegrityError:
-                    logger.debug("Integrity Error saving Activity Stream instance for id : " + str(instance.id))
-            # else:
-            #     obj1_type_actual = instance.object1_type.split(".")[-1]
-            #     if obj1_type_actual in ("InventoryUpdate", "ProjectUpdate", "Job") and instance.id is not None:
-            #         instance.delete()
-
-        self.instance_ids = []
-        return response
-
-    def set_actor(self, user, sender, instance, **kwargs):
-        if sender == ActivityStream:
-            if isinstance(user, User) and instance.actor is None:
-                user = User.objects.filter(id=user.id)
-                if user.exists():
-                    user = user[0]
-                    instance.actor = user
-            else:
-                if instance.id not in self.instance_ids:
-                    self.instance_ids.append(instance.id)
-
-
 class SessionTimeoutMiddleware(MiddlewareMixin):
     """
     Resets the session timeout for both the UI and the actual session for the API
@@ -464,7 +464,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='unifiedjob',
             name='instance_group',
-            field=models.ForeignKey(on_delete=models.SET_NULL, default=None, blank=True, to='main.InstanceGroup', help_text='The Rampart/Instance group the job was run under', null=True),
+            field=models.ForeignKey(on_delete=models.SET_NULL, default=None, blank=True, to='main.InstanceGroup', help_text='The Instance group the job was run under', null=True),
         ),
         migrations.AddField(
             model_name='unifiedjobtemplate',
@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='unifiedjob',
             name='instance_group',
-            field=models.ForeignKey(blank=True, default=None, help_text='The Rampart/Instance group the job was run under', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, to='main.InstanceGroup'),
+            field=models.ForeignKey(blank=True, default=None, help_text='The Instance group the job was run under', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, to='main.InstanceGroup'),
         ),
     ]
@@ -0,0 +1,39 @@
+# Generated by Django 2.2.11 on 2020-04-03 00:11
+
+from django.db import migrations, models
+
+
+def remove_manual_inventory_sources(apps, schema_editor):
+    '''Previously we would automatically create inventory sources after
+    Group creation and we would use the parent Group as our interface for the user.
+    During that process we would create InventorySource that had a source of "manual".
+    '''
+    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
+    InventoryUpdate.objects.filter(source='').delete()
+    InventorySource = apps.get_model('main', 'InventorySource')
+    InventorySource.objects.filter(source='').delete()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0113_v370_event_bigint'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='inventorysource',
+            name='deprecated_group',
+        ),
+        migrations.RunPython(remove_manual_inventory_sources),
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+        ),
+    ]
@@ -3,7 +3,7 @@
 
 # Django
 from django.conf import settings  # noqa
-from django.db import connection, ProgrammingError
+from django.db import connection
 from django.db.models.signals import pre_delete  # noqa
 
 # AWX
@@ -91,14 +91,13 @@ def enforce_bigint_pk_migration():
         'main_systemjobevent'
     ):
         with connection.cursor() as cursor:
-            try:
-                cursor.execute(f'SELECT MAX(id) FROM _old_{tblname}')
-                if cursor.fetchone():
-                    from awx.main.tasks import migrate_legacy_event_data
-                    migrate_legacy_event_data.apply_async([tblname])
-            except ProgrammingError:
-                # the table is gone (migration is unnecessary)
-                pass
+            cursor.execute(
+                'SELECT 1 FROM information_schema.tables WHERE table_name=%s',
+                (f'_old_{tblname}',)
+            )
+            if bool(cursor.rowcount):
+                from awx.main.tasks import migrate_legacy_event_data
+                migrate_legacy_event_data.apply_async([tblname])
 
 
 def cleanup_created_modified_by(sender, **kwargs):
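A plausible motivation for this rewrite (an inference, not stated in the diff): in PostgreSQL a failed SELECT against a missing table aborts the surrounding transaction even if the ProgrammingError is caught, whereas probing information_schema never raises. The probe, as a standalone helper with a hypothetical name, assuming a psycopg2-style cursor:

    def table_exists(cursor, tblname):
        # This query succeeds whether or not the table exists, so it never
        # poisons the current transaction the way a caught error can.
        cursor.execute(
            'SELECT 1 FROM information_schema.tables WHERE table_name=%s',
            (tblname,)
        )
        return bool(cursor.rowcount)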
@@ -821,7 +821,6 @@ class InventorySourceOptions(BaseModel):
         injectors = dict()
 
     SOURCE_CHOICES = [
-        ('', _('Manual')),
         ('file', _('File, Directory or Script')),
         ('scm', _('Sourced from a Project')),
         ('ec2', _('Amazon EC2')),
@@ -932,8 +931,8 @@ class InventorySourceOptions(BaseModel):
     source = models.CharField(
         max_length=32,
         choices=SOURCE_CHOICES,
-        blank=True,
-        default='',
+        blank=False,
+        default=None,
     )
     source_path = models.CharField(
         max_length=1024,
@@ -1237,14 +1236,6 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
         on_delete=models.CASCADE,
     )
 
-    deprecated_group = models.OneToOneField(
-        'Group',
-        related_name='deprecated_inventory_source',
-        null=True,
-        default=None,
-        on_delete=models.CASCADE,
-    )
-
     source_project = models.ForeignKey(
         'Project',
         related_name='scm_inventory_sources',
@@ -1414,16 +1405,6 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
             started=list(started_notification_templates),
             success=list(success_notification_templates))
 
-    def clean_source(self):  # TODO: remove in 3.3
-        source = self.source
-        if source and self.deprecated_group:
-            qs = self.deprecated_group.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
-            existing_sources = qs.exclude(pk=self.pk)
-            if existing_sources.count():
-                s = u', '.join([x.deprecated_group.name for x in existing_sources])
-                raise ValidationError(_('Unable to configure this item for cloud sync. It is already managed by %s.') % s)
-        return source
-
     def clean_update_on_project_update(self):
         if self.update_on_project_update is True and \
            self.source == 'scm' and \
@@ -1512,8 +1493,6 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
         if self.inventory_source.inventory is not None:
             websocket_data.update(dict(inventory_id=self.inventory_source.inventory.pk))
 
-        if self.inventory_source.deprecated_group is not None:  # TODO: remove in 3.3
-            websocket_data.update(dict(group_id=self.inventory_source.deprecated_group.id))
         return websocket_data
 
     def get_absolute_url(self, request=None):
@@ -199,7 +199,7 @@ class ProjectOptions(models.Model):
         results = []
         project_path = self.get_project_path()
         if project_path:
-            for dirpath, dirnames, filenames in os.walk(smart_str(project_path)):
+            for dirpath, dirnames, filenames in os.walk(smart_str(project_path), followlinks=True):
                 if skip_directory(dirpath):
                     continue
                 for filename in filenames:
@@ -191,7 +191,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
         return rrule
 
     @classmethod
-    def rrulestr(cls, rrule, **kwargs):
+    def rrulestr(cls, rrule, fast_forward=True, **kwargs):
         """
         Apply our own custom rrule parsing requirements
         """
@@ -205,11 +205,17 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
                 'A valid TZID must be provided (e.g., America/New_York)'
             )
 
-        if 'MINUTELY' in rrule or 'HOURLY' in rrule:
+        if fast_forward and ('MINUTELY' in rrule or 'HOURLY' in rrule):
             try:
                 first_event = x[0]
-                if first_event < now() - datetime.timedelta(days=365 * 5):
-                    raise ValueError('RRULE values with more than 1000 events are not allowed.')
+                if first_event < now():
+                    # hourly/minutely rrules with far-past DTSTART values
+                    # are *really* slow to precompute
+                    # start *from* one week ago to speed things up drastically
+                    dtstart = x._rrule[0]._dtstart.strftime(':%Y%m%dT')
+                    new_start = (now() - datetime.timedelta(days=7)).strftime(':%Y%m%dT')
+                    new_rrule = rrule.replace(dtstart, new_start)
+                    return Schedule.rrulestr(new_rrule, fast_forward=False)
             except IndexError:
                 pass
         return x
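The fast-forward trick above avoids asking the rrule machinery to enumerate decades of minutely occurrences just to find the next one. A standalone sketch of the same idea using dateutil directly (not AWX's code):

    import datetime

    from dateutil import rrule

    # Enumerating a minutely rule from 1970 walks millions of occurrences;
    # moving DTSTART to one week ago yields the same upcoming dates cheaply.
    now = datetime.datetime.now(datetime.timezone.utc)
    recent = rrule.rrule(
        rrule.MINUTELY, interval=15,
        dtstart=now - datetime.timedelta(days=7),
    )
    print(recent.after(now))  # next occurrence, computed almost instantly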
@@ -707,7 +707,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         null=True,
         default=None,
         on_delete=polymorphic.SET_NULL,
-        help_text=_('The Rampart/Instance group the job was run under'),
+        help_text=_('The Instance group the job was run under'),
     )
     organization = models.ForeignKey(
         'Organization',
@@ -89,7 +89,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
             grafana_data['isRegion'] = self.isRegion
             grafana_data['dashboardId'] = self.dashboardId
             grafana_data['panelId'] = self.panelId
-            grafana_data['tags'] = self.annotation_tags
+            if self.annotation_tags:
+                grafana_data['tags'] = self.annotation_tags
             grafana_data['text'] = m.subject
             grafana_headers['Authorization'] = "Bearer {}".format(self.grafana_key)
             grafana_headers['Content-Type'] = "application/json"
@@ -123,7 +123,7 @@ class SimpleDAG(object):
             self.root_nodes.discard(to_obj_ord)
 
         if from_obj_ord is None and to_obj_ord is None:
-            raise LookupError("From object {} and to object not found".format(from_obj, to_obj))
+            raise LookupError("From object {} and to object {} not found".format(from_obj, to_obj))
         elif from_obj_ord is None:
             raise LookupError("From object not found {}".format(from_obj))
         elif to_obj_ord is None:
@@ -226,7 +226,7 @@ class TaskManager():
                 # non-Ansible jobs on isolated instances run on controller
                 task.instance_group = rampart_group.controller
                 task.execution_node = random.choice(list(rampart_group.controller.instances.all().values_list('hostname', flat=True)))
-                logger.debug('Submitting isolated {} to queue {}.'.format(
+                logger.debug('Submitting isolated {} to queue {} on node {}.'.format(
                     task.log_format, task.instance_group.name, task.execution_node))
             elif controller_node:
                 task.instance_group = rampart_group
@@ -52,6 +52,7 @@ from awx.conf.utils import conf_to_dict
 __all__ = []
 
 logger = logging.getLogger('awx.main.signals')
+analytics_logger = logging.getLogger('awx.analytics.activity_stream')
 
 # Update has_active_failures for inventory/groups when a Host/Group is deleted,
 # when a Host-Group or Group-Group relationship is updated, or when a Job is deleted
@@ -363,12 +364,24 @@ def model_serializer_mapping():
     }
 
 
+def emit_activity_stream_change(instance):
+    if 'migrate' in sys.argv:
+        # don't emit activity stream external logs during migrations, it
+        # could be really noisy
+        return
+    from awx.api.serializers import ActivityStreamSerializer
+    actor = None
+    if instance.actor:
+        actor = instance.actor.username
+    summary_fields = ActivityStreamSerializer(instance).get_summary_fields(instance)
+    analytics_logger.info('Activity Stream update entry for %s' % str(instance.object1),
+                          extra=dict(changes=instance.changes, relationship=instance.object_relationship_type,
+                                     actor=actor, operation=instance.operation,
+                                     object1=instance.object1, object2=instance.object2, summary_fields=summary_fields))
+
+
 def activity_stream_create(sender, instance, created, **kwargs):
     if created and activity_stream_enabled:
-        # TODO: remove deprecated_group conditional in 3.3
-        # Skip recording any inventory source directly associated with a group.
-        if isinstance(instance, InventorySource) and instance.deprecated_group:
-            return
         _type = type(instance)
         if getattr(_type, '_deferred', False):
             return
@@ -399,6 +412,9 @@ def activity_stream_create(sender, instance, created, **kwargs):
         else:
             activity_entry.setting = conf_to_dict(instance)
         activity_entry.save()
+        connection.on_commit(
+            lambda: emit_activity_stream_change(activity_entry)
+        )
 
 
 def activity_stream_update(sender, instance, **kwargs):
@@ -430,15 +446,14 @@ def activity_stream_update(sender, instance, **kwargs):
     else:
         activity_entry.setting = conf_to_dict(instance)
     activity_entry.save()
+    connection.on_commit(
+        lambda: emit_activity_stream_change(activity_entry)
+    )
 
 
 def activity_stream_delete(sender, instance, **kwargs):
     if not activity_stream_enabled:
         return
-    # TODO: remove deprecated_group conditional in 3.3
-    # Skip recording any inventory source directly associated with a group.
-    if isinstance(instance, InventorySource) and instance.deprecated_group:
-        return
     # Inventory delete happens in the task system rather than request-response-cycle.
     # If we trigger this handler there we may fall into db-integrity-related race conditions.
    # So we add flag verification to prevent normal signal handling. This function will be
@@ -467,6 +482,9 @@ def activity_stream_delete(sender, instance, **kwargs):
                                    object1=object1,
                                    actor=get_current_user_or_none())
     activity_entry.save()
+    connection.on_commit(
+        lambda: emit_activity_stream_change(activity_entry)
+    )
 
 
 def activity_stream_associate(sender, instance, **kwargs):
@@ -540,6 +558,9 @@ def activity_stream_associate(sender, instance, **kwargs):
                 activity_entry.role.add(role)
                 activity_entry.object_relationship_type = obj_rel
             activity_entry.save()
+            connection.on_commit(
+                lambda: emit_activity_stream_change(activity_entry)
+            )
 
 
 @receiver(current_user_getter)
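The connection.on_commit() calls added throughout this hunk defer the external log emission until the enclosing database transaction commits, so external systems never hear about rows that end up rolled back. A generic illustration of the pattern (emit() and save_and_notify() are hypothetical, not AWX's API):

    from django.db import connection, transaction

    def emit(entry):
        # stand-in for shipping the event to an external log aggregator
        print('committed:', entry.pk)

    def save_and_notify(entry):
        with transaction.atomic():
            entry.save()
            # runs only if (and after) the transaction commits
            connection.on_commit(lambda: emit(entry))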
@@ -73,6 +73,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
                             get_awx_version)
 from awx.main.utils.ansible import read_ansible_config
 from awx.main.utils.common import _get_ansible_version, get_custom_venv_choices
+from awx.main.utils.external_logging import reconfigure_rsyslog
 from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
 from awx.main.utils.reload import stop_local_services
 from awx.main.utils.pglock import advisory_lock
@@ -140,6 +141,9 @@ def dispatch_startup():
     # and Tower fall out of use/support, we can probably just _assume_ that
     # everybody has moved to bigint, and remove this code entirely
     enforce_bigint_pk_migration()
+
+    # Update Tower's rsyslog.conf file based on logging settings in the db
+    reconfigure_rsyslog()
 
 
 def inform_cluster_of_shutdown():
@@ -280,6 +284,12 @@ def handle_setting_changes(setting_keys):
     logger.debug('cache delete_many(%r)', cache_keys)
     cache.delete_many(cache_keys)
 
+    if any([
+        setting.startswith('LOG_AGGREGATOR')
+        for setting in setting_keys
+    ]):
+        connection.on_commit(reconfigure_rsyslog)
+
 
 @task(queue='tower_broadcast_all')
 def delete_project_files(project_path):
@@ -1466,7 +1476,7 @@ class BaseTask(object):
                 params.get('module'),
                 module_args,
                 ident=str(self.instance.pk))
             self.event_ct = len(isolated_manager_instance.handled_events)
             self.finished_callback(None)
         else:
             res = ansible_runner.interface.run(**params)
             status = res.status
@@ -220,7 +220,7 @@ def create_job_template(name, roles=None, persisted=True, webhook_service='', **kwargs):
     if 'organization' in kwargs:
         org = kwargs['organization']
         if type(org) is not Organization:
-            org = mk_organization(org, '%s-desc'.format(org), persisted=persisted)
+            org = mk_organization(org, org, persisted=persisted)
 
     if 'credential' in kwargs:
         cred = kwargs['credential']
@@ -298,7 +298,7 @@ def create_organization(name, roles=None, persisted=True, **kwargs):
     labels = {}
     notification_templates = {}
 
-    org = mk_organization(name, '%s-desc'.format(name), persisted=persisted)
+    org = mk_organization(name, name, persisted=persisted)
 
     if 'inventories' in kwargs:
         for i in kwargs['inventories']:
awx/main/tests/functional/analytics/test_collectors.py (new file, 160 lines)
@@ -0,0 +1,160 @@
+import pytest
+import tempfile
+import os
+import shutil
+import csv
+
+from django.utils.timezone import now
+from django.db.backends.sqlite3.base import SQLiteCursorWrapper
+
+from awx.main.analytics import collectors
+
+from awx.main.models import (
+    ProjectUpdate,
+    InventorySource,
+    WorkflowJob,
+    WorkflowJobNode,
+    JobTemplate,
+)
+
+
+@pytest.fixture
+def sqlite_copy_expert(request):
+    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+    path = tempfile.mkdtemp(prefix="copied_tables")
+
+    def write_stdout(self, sql, fd):
+        # Would be cool if we instead properly dissected the SQL query and verified
+        # it that way. But instead, we just take the naive approach here.
+        assert sql.startswith("COPY (")
+        assert sql.endswith(") TO STDOUT WITH CSV HEADER")
+
+        sql = sql.replace("COPY (", "")
+        sql = sql.replace(") TO STDOUT WITH CSV HEADER", "")
+        # sqlite equivalent
+        sql = sql.replace("ARRAY_AGG", "GROUP_CONCAT")
+
+        # Remove JSON style queries
+        # TODO: could replace JSON style queries with sqlite kind of equivalents
+        sql_new = []
+        for line in sql.split("\n"):
+            if line.find("main_jobevent.event_data::") == -1:
+                sql_new.append(line)
+            elif not line.endswith(","):
+                sql_new[-1] = sql_new[-1].rstrip(",")
+        sql = "\n".join(sql_new)
+
+        self.execute(sql)
+        results = self.fetchall()
+        headers = [i[0] for i in self.description]
+
+        csv_handle = csv.writer(
+            fd,
+            delimiter=",",
+            quoting=csv.QUOTE_ALL,
+            escapechar="\\",
+            lineterminator="\n",
+        )
+        csv_handle.writerow(headers)
+        csv_handle.writerows(results)
+
+    setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
+    request.addfinalizer(lambda: shutil.rmtree(path))
+    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
+    return path
+
+
+@pytest.mark.django_db
+def test_copy_tables_unified_job_query(
+    sqlite_copy_expert, project, inventory, job_template
+):
+    """
+    Ensure that various unified job types are in the output of the query.
+    """
+
+    time_start = now()
+    inv_src = InventorySource.objects.create(
+        name="inventory_update1", inventory=inventory, source="gce"
+    )
+
+    project_update_name = ProjectUpdate.objects.create(
+        project=project, name="project_update1"
+    ).name
+    inventory_update_name = inv_src.create_unified_job().name
+    job_name = job_template.create_unified_job().name
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        collectors.copy_tables(time_start, tmpdir, subset="unified_jobs")
+        with open(os.path.join(tmpdir, "unified_jobs_table.csv")) as f:
+            lines = "".join([l for l in f])
+
+            assert project_update_name in lines
+            assert inventory_update_name in lines
+            assert job_name in lines
+
+
+@pytest.fixture
+def workflow_job(states=["new", "new", "new", "new", "new"]):
+    """
+    Workflow topology:
+           node[0]
+            /\
+          s/  \f
+          /    \
+    node[1,5]   node[3]
+        /         \
+      s/           \f
+      /             \
+    node[2]        node[4]
+    """
+    wfj = WorkflowJob.objects.create()
+    jt = JobTemplate.objects.create(name="test-jt")
+    nodes = [
+        WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=jt)
+        for i in range(0, 6)
+    ]
+    for node, state in zip(nodes, states):
+        if state:
+            node.job = jt.create_job()
+            node.job.status = state
+            node.job.save()
+            node.save()
+    nodes[0].success_nodes.add(nodes[1])
+    nodes[0].success_nodes.add(nodes[5])
+    nodes[1].success_nodes.add(nodes[2])
+    nodes[0].failure_nodes.add(nodes[3])
+    nodes[3].failure_nodes.add(nodes[4])
+    return wfj
+
+
+@pytest.mark.django_db
+def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
+    time_start = now()
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        collectors.copy_tables(time_start, tmpdir, subset="workflow_job_node_query")
+        with open(os.path.join(tmpdir, "workflow_job_node_table.csv")) as f:
+            reader = csv.reader(f)
+            # Pop the headers
+            next(reader)
+            lines = [l for l in reader]
+
+            ids = [int(l[0]) for l in lines]
+
+            assert ids == list(
+                workflow_job.workflow_nodes.all().values_list("id", flat=True)
+            )
+
+            for index, relationship in zip(
+                [7, 8, 9], ["success_nodes", "failure_nodes", "always_nodes"]
+            ):
+                for i, l in enumerate(lines):
+                    related_nodes = (
+                        [int(e) for e in l[index].split(",")] if l[index] else []
+                    )
+                    assert related_nodes == list(
+                        getattr(workflow_job.workflow_nodes.all()[i], relationship)
+                        .all()
+                        .values_list("id", flat=True)
+                    ), f"(right side) workflow_nodes.all()[{i}].{relationship}.all()"
@@ -13,10 +13,10 @@ def test_empty():
         "active_host_count": 0,
         "credential": 0,
         "custom_inventory_script": 0,
-        "custom_virtualenvs": 0, # dev env ansible3
+        "custom_virtualenvs": 0,  # dev env ansible3
         "host": 0,
         "inventory": 0,
-        "inventories": {'normal': 0, 'smart': 0},
+        "inventories": {"normal": 0, "smart": 0},
         "job_template": 0,
         "notification_template": 0,
         "organization": 0,
@@ -27,28 +27,97 @@ def test_empty():
         "user": 0,
         "workflow_job_template": 0,
         "unified_job": 0,
-        "pending_jobs": 0
+        "pending_jobs": 0,
     }
 
 
 @pytest.mark.django_db
-def test_database_counts(organization_factory, job_template_factory,
-                         workflow_job_template_factory):
-    objs = organization_factory('org', superusers=['admin'])
-    jt = job_template_factory('test', organization=objs.organization,
-                              inventory='test_inv', project='test_project',
-                              credential='test_cred')
-    workflow_job_template_factory('test')
+def test_database_counts(
+    organization_factory, job_template_factory, workflow_job_template_factory
+):
+    objs = organization_factory("org", superusers=["admin"])
+    jt = job_template_factory(
+        "test",
+        organization=objs.organization,
+        inventory="test_inv",
+        project="test_project",
+        credential="test_cred",
+    )
+    workflow_job_template_factory("test")
     models.Team(organization=objs.organization).save()
     models.Host(inventory=jt.inventory).save()
     models.Schedule(
-        rrule='DTSTART;TZID=America/New_York:20300504T150000',
-        unified_job_template=jt.job_template
+        rrule="DTSTART;TZID=America/New_York:20300504T150000",
+        unified_job_template=jt.job_template,
     ).save()
     models.CustomInventoryScript(organization=objs.organization).save()
 
     counts = collectors.counts(None)
-    for key in ('organization', 'team', 'user', 'inventory', 'credential',
-                'project', 'job_template', 'workflow_job_template', 'host',
-                'schedule', 'custom_inventory_script'):
+    for key in (
+        "organization",
+        "team",
+        "user",
+        "inventory",
+        "credential",
+        "project",
+        "job_template",
+        "workflow_job_template",
+        "host",
+        "schedule",
+        "custom_inventory_script",
+    ):
         assert counts[key] == 1
 
 
+@pytest.mark.django_db
+def test_inventory_counts(organization_factory, inventory_factory):
+    (inv1, inv2, inv3) = [inventory_factory(f"inv-{i}") for i in range(3)]
+
+    s1 = inv1.inventory_sources.create(name="src1", source="ec2")
+    s2 = inv1.inventory_sources.create(name="src2", source="file")
+    s3 = inv1.inventory_sources.create(name="src3", source="gce")
+
+    s1.hosts.create(name="host1", inventory=inv1)
+    s1.hosts.create(name="host2", inventory=inv1)
+    s1.hosts.create(name="host3", inventory=inv1)
+
+    s2.hosts.create(name="host4", inventory=inv1)
+    s2.hosts.create(name="host5", inventory=inv1)
+
+    s3.hosts.create(name="host6", inventory=inv1)
+
+    s1 = inv2.inventory_sources.create(name="src1", source="ec2")
+
+    s1.hosts.create(name="host1", inventory=inv2)
+    s1.hosts.create(name="host2", inventory=inv2)
+    s1.hosts.create(name="host3", inventory=inv2)
+
+    inv_counts = collectors.inventory_counts(None)
+
+    assert {
+        inv1.id: {
+            "name": "inv-0",
+            "kind": "",
+            "hosts": 6,
+            "sources": 3,
+            "source_list": [
+                {"name": "src1", "source": "ec2", "num_hosts": 3},
+                {"name": "src2", "source": "file", "num_hosts": 2},
+                {"name": "src3", "source": "gce", "num_hosts": 1},
+            ],
+        },
+        inv2.id: {
+            "name": "inv-1",
+            "kind": "",
+            "hosts": 3,
+            "sources": 1,
+            "source_list": [{"name": "src1", "source": "ec2", "num_hosts": 3}],
+        },
+        inv3.id: {
+            "name": "inv-2",
+            "kind": "",
+            "hosts": 0,
+            "sources": 0,
+            "source_list": [],
+        },
+    } == inv_counts
@@ -1,7 +1,6 @@
 import pytest
 
 from awx.api.versioning import reverse
-from awx.main.middleware import ActivityStreamMiddleware
 from awx.main.models.activity_stream import ActivityStream
 from awx.main.access import ActivityStreamAccess
 from awx.conf.models import Setting
@@ -61,28 +60,6 @@ def test_ctint_activity_stream(monkeypatch, get, user, settings):
     assert response.data['summary_fields']['setting'][0]['name'] == 'FOO'
 
 
-@pytest.mark.django_db
-def test_middleware_actor_added(monkeypatch, post, get, user, settings):
-    settings.ACTIVITY_STREAM_ENABLED = True
-    u = user('admin-poster', True)
-
-    url = reverse('api:organization_list')
-    response = post(url,
-                    dict(name='test-org', description='test-desc'),
-                    u,
-                    middleware=ActivityStreamMiddleware())
-    assert response.status_code == 201
-
-    org_id = response.data['id']
-    activity_stream = ActivityStream.objects.filter(organization__pk=org_id).first()
-
-    url = reverse('api:activity_stream_detail', kwargs={'pk': activity_stream.pk})
-    response = get(url, u)
-
-    assert response.status_code == 200
-    assert response.data['summary_fields']['actor']['username'] == 'admin-poster'
-
-
 @pytest.mark.django_db
 def test_rbac_stream_resource_roles(activity_stream_entry, organization, org_admin, settings):
     settings.ACTIVITY_STREAM_ENABLED = True
@@ -972,7 +972,7 @@ def test_field_removal(put, organization, admin, credentialtype_ssh):
     ['insights_inventories', Inventory()],
     ['unifiedjobs', Job()],
     ['unifiedjobtemplates', JobTemplate()],
-    ['unifiedjobtemplates', InventorySource()],
+    ['unifiedjobtemplates', InventorySource(source='ec2')],
     ['projects', Project()],
    ['workflowjobnodes', WorkflowJobNode()],
 ])
@@ -1,6 +1,8 @@
-import pytest
 import base64
+import json
+import time
+
+import pytest
 
 from django.db import connection
 from django.test.utils import override_settings
@@ -326,6 +328,38 @@ def test_refresh_accesstoken(oauth_application, post, get, delete, admin):
     assert original_refresh_token.revoked  # is not None
 
 
+@pytest.mark.django_db
+def test_refresh_token_expiration_is_respected(oauth_application, post, get, delete, admin):
+    response = post(
+        reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
+        {'scope': 'read'}, admin, expect=201
+    )
+    assert AccessToken.objects.count() == 1
+    assert RefreshToken.objects.count() == 1
+    refresh_token = RefreshToken.objects.get(token=response.data['refresh_token'])
+    refresh_url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
+    short_lived = {
+        'ACCESS_TOKEN_EXPIRE_SECONDS': 1,
+        'AUTHORIZATION_CODE_EXPIRE_SECONDS': 1,
+        'REFRESH_TOKEN_EXPIRE_SECONDS': 1
+    }
+    time.sleep(1)
+    with override_settings(OAUTH2_PROVIDER=short_lived):
+        response = post(
+            refresh_url,
+            data='grant_type=refresh_token&refresh_token=' + refresh_token.token,
+            content_type='application/x-www-form-urlencoded',
+            HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
+                oauth_application.client_id, oauth_application.client_secret
+            ]))))
+        )
+    assert response.status_code == 403
+    assert b'The refresh token has expired.' in response.content
+    assert RefreshToken.objects.filter(token=refresh_token).exists()
+    assert AccessToken.objects.count() == 1
+    assert RefreshToken.objects.count() == 1
+
+
 @pytest.mark.django_db
 def test_revoke_access_then_refreshtoken(oauth_application, post, get, delete, admin):
@@ -1,6 +1,8 @@
+import datetime
+
 import pytest
 
 from django.utils.encoding import smart_str
 from django.utils.timezone import now
 
 from awx.api.versioning import reverse
 from awx.main.models import JobTemplate, Schedule
@@ -140,7 +142,6 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
     ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"),  # noqa
     ("DTSTART;TZID=America/New_York:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"),
     ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
-    ("DTSTART:19700101T000000Z RRULE:FREQ=MINUTELY;INTERVAL=1", "more than 1000 events are not allowed"),  # noqa
 ])
 def test_invalid_rrules(post, admin_user, project, inventory, rrule, error):
     job_template = JobTemplate.objects.create(
@@ -342,6 +343,40 @@ def test_months_with_31_days(post, admin_user):
     ]
 
 
+@pytest.mark.django_db
+@pytest.mark.timeout(3)
+@pytest.mark.parametrize('freq, delta, total_seconds', (
+    ('MINUTELY', 1, 60),
+    ('MINUTELY', 15, 15 * 60),
+    ('HOURLY', 1, 3600),
+    ('HOURLY', 4, 3600 * 4),
+))
+def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds):
+    url = reverse('api:schedule_rrule')
+    # every <interval>, at the :30 second mark
+    rrule = f'DTSTART;TZID=America/New_York:20051231T000030 RRULE:FREQ={freq};INTERVAL={delta}'
+    start = now()
+    next_ten = post(url, {'rrule': rrule}, admin_user, expect=200).data['utc']
+
+    assert len(next_ten) == 10
+
+    # the first date is *in the future*
+    assert next_ten[0] >= start
+
+    # ...but *no more than* <interval> into the future
+    assert now() + datetime.timedelta(**{
+        'minutes' if freq == 'MINUTELY' else 'hours': delta
+    })
+
+    # every date in the list is <interval> greater than the last
+    for i, x in enumerate(next_ten):
+        if i == 0:
+            continue
+        assert x.second == 30
+        delta = (x - next_ten[i - 1])
+        assert delta.total_seconds() == total_seconds
+
+
 def test_dst_rollback_duplicates(post, admin_user):
     # From Nov 2 -> Nov 3, 2030, daylight savings ends and we "roll back" an hour.
     # Make sure we don't "double count" duplicate times in the "rolled back"
@@ -5,17 +5,13 @@
 # Python
 import pytest
-import os
-import time
-
-from django.conf import settings
-
 
 # Mock
 from unittest import mock
 
 # AWX
 from awx.api.versioning import reverse
 from awx.conf.models import Setting
 from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
+from awx.conf.registry import settings_registry
 
 
TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
@@ -237,73 +233,95 @@ def test_ui_settings(get, put, patch, delete, admin):
 
 
 @pytest.mark.django_db
-def test_logging_aggregrator_connection_test_requires_superuser(get, post, alice):
+def test_logging_aggregator_connection_test_requires_superuser(post, alice):
     url = reverse('api:setting_logging_test')
     post(url, {}, user=alice, expect=403)
 
 
-@pytest.mark.parametrize('key', [
-    'LOG_AGGREGATOR_TYPE',
-    'LOG_AGGREGATOR_HOST',
-])
-@pytest.mark.django_db
-def test_logging_aggregrator_connection_test_bad_request(get, post, admin, key):
-    url = reverse('api:setting_logging_test')
-    resp = post(url, {}, user=admin, expect=400)
-    assert 'This field is required.' in resp.data.get(key, [])
-
-
-@pytest.mark.django_db
-def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
-    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
-        url = reverse('api:setting_logging_test')
-        user_data = {
-            'LOG_AGGREGATOR_TYPE': 'logstash',
-            'LOG_AGGREGATOR_HOST': 'localhost',
-            'LOG_AGGREGATOR_PORT': 8080,
-            'LOG_AGGREGATOR_USERNAME': 'logger',
-            'LOG_AGGREGATOR_PASSWORD': 'mcstash'
-        }
-        post(url, user_data, user=admin, expect=200)
-        args, kwargs = perform_test.call_args_list[0]
-        create_settings = kwargs['custom_settings']
-        for k, v in user_data.items():
-            assert hasattr(create_settings, k)
-            assert getattr(create_settings, k) == v
-
-
-@pytest.mark.django_db
-def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch, post, admin):
-    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
-    patch(url, user=admin, data={'LOG_AGGREGATOR_PASSWORD': 'password123'}, expect=200)
-    time.sleep(1)  # log settings are cached slightly
-
-    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
-        url = reverse('api:setting_logging_test')
-        user_data = {
-            'LOG_AGGREGATOR_TYPE': 'logstash',
-            'LOG_AGGREGATOR_HOST': 'localhost',
-            'LOG_AGGREGATOR_PORT': 8080,
-            'LOG_AGGREGATOR_USERNAME': 'logger',
-            'LOG_AGGREGATOR_PASSWORD': '$encrypted$'
-        }
-        post(url, user_data, user=admin, expect=200)
-        args, kwargs = perform_test.call_args_list[0]
-        create_settings = kwargs['custom_settings']
-        assert getattr(create_settings, 'LOG_AGGREGATOR_PASSWORD') == 'password123'
-
-
-@pytest.mark.django_db
-def test_logging_aggregrator_connection_test_invalid(mocker, get, post, admin):
-    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
-        perform_test.side_effect = LoggingConnectivityException('404: Not Found')
-        url = reverse('api:setting_logging_test')
-        resp = post(url, {
-            'LOG_AGGREGATOR_TYPE': 'logstash',
-            'LOG_AGGREGATOR_HOST': 'localhost',
-            'LOG_AGGREGATOR_PORT': 8080
-        }, user=admin, expect=500)
-        assert resp.data == {'error': '404: Not Found'}
+@pytest.mark.django_db
+def test_logging_aggregator_connection_test_not_enabled(post, admin):
+    url = reverse('api:setting_logging_test')
+    resp = post(url, {}, user=admin, expect=409)
+    assert 'Logging not enabled' in resp.data.get('error')
+
+
+def _mock_logging_defaults():
+    # Pre-populate settings obj with defaults
+    class MockSettings:
+        pass
+    mock_settings_obj = MockSettings()
+    mock_settings_json = dict()
+    for key in settings_registry.get_registered_settings(category_slug='logging'):
+        value = settings_registry.get_setting_field(key).get_default()
+        setattr(mock_settings_obj, key, value)
+        mock_settings_json[key] = value
+    setattr(mock_settings_obj, 'MAX_EVENT_RES_DATA', 700000)
+    return mock_settings_obj, mock_settings_json
+
+
+@pytest.mark.parametrize('key, value, error', [
+    ['LOG_AGGREGATOR_TYPE', 'logstash', 'Cannot enable log aggregator without providing host.'],
+    ['LOG_AGGREGATOR_HOST', 'https://logstash', 'Cannot enable log aggregator without providing type.']
+])
+@pytest.mark.django_db
+def test_logging_aggregator_missing_settings(put, post, admin, key, value, error):
+    _, mock_settings = _mock_logging_defaults()
+    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
+    mock_settings[key] = value
+    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
+    response = put(url, data=mock_settings, user=admin, expect=400)
+    assert error in str(response.data)
+
+
+@pytest.mark.parametrize('type, host, port, username, password', [
+    ['logstash', 'localhost', 8080, 'logger', 'mcstash'],
+    ['loggly', 'http://logs-01.loggly.com/inputs/1fd38090-hash-h4a$h-8d80-t0k3n71/tag/http/', None, None, None],
+    ['splunk', 'https://yoursplunk:8088/services/collector/event', None, None, None],
+    ['other', '97.221.40.41', 9000, 'logger', 'mcstash'],
+    ['sumologic', 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/Zagnw_f9XGr_zZgd-_EPM0hb8_rUU7_RU8Q==',
+     None, None, None]
+])
+@pytest.mark.django_db
+def test_logging_aggregator_valid_settings(put, post, admin, type, host, port, username, password):
+    _, mock_settings = _mock_logging_defaults()
+    # type = 'splunk'
+    # host = 'https://yoursplunk:8088/services/collector/event'
+    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
+    mock_settings['LOG_AGGREGATOR_TYPE'] = type
+    mock_settings['LOG_AGGREGATOR_HOST'] = host
+    if port:
+        mock_settings['LOG_AGGREGATOR_PORT'] = port
+    if username:
+        mock_settings['LOG_AGGREGATOR_USERNAME'] = username
+    if password:
+        mock_settings['LOG_AGGREGATOR_PASSWORD'] = password
+    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
+    response = put(url, data=mock_settings, user=admin, expect=200)
+    assert type in response.data.get('LOG_AGGREGATOR_TYPE')
+    assert host in response.data.get('LOG_AGGREGATOR_HOST')
+    if port:
+        assert port == response.data.get('LOG_AGGREGATOR_PORT')
+    if username:
+        assert username in response.data.get('LOG_AGGREGATOR_USERNAME')
+    if password:  # Note: password should be encrypted
+        assert '$encrypted$' in response.data.get('LOG_AGGREGATOR_PASSWORD')
+
+
+@pytest.mark.django_db
+def test_logging_aggregator_connection_test_valid(put, post, admin):
+    _, mock_settings = _mock_logging_defaults()
+    type = 'other'
+    host = 'https://localhost'
+    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
+    mock_settings['LOG_AGGREGATOR_TYPE'] = type
+    mock_settings['LOG_AGGREGATOR_HOST'] = host
+    # POST to save these mock settings
+    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
+    put(url, data=mock_settings, user=admin, expect=200)
+    # "Test" the logger
+    url = reverse('api:setting_logging_test')
+    post(url, {}, user=admin, expect=202)
 
 
 @pytest.mark.django_db
@@ -23,9 +23,9 @@ def _mk_project_update():
 
 
 def _mk_inventory_update():
-    source = InventorySource()
+    source = InventorySource(source='ec2')
     source.save()
-    iu = InventoryUpdate(inventory_source=source)
+    iu = InventoryUpdate(inventory_source=source, source='ec2')
     return iu
@@ -123,7 +123,11 @@ def test_delete_project_update_in_active_state(project, delete, admin, status):
 @pytest.mark.parametrize("status", list(TEST_STATES))
 @pytest.mark.django_db
 def test_delete_inventory_update_in_active_state(inventory_source, delete, admin, status):
-    i = InventoryUpdate.objects.create(inventory_source=inventory_source, status=status)
+    i = InventoryUpdate.objects.create(
+        inventory_source=inventory_source,
+        status=status,
+        source=inventory_source.source
+    )
     url = reverse('api:inventory_update_detail', kwargs={'pk': i.pk})
     delete(url, None, admin, expect=403)
@@ -228,7 +228,7 @@ class TestINIImports:
         assert inventory.hosts.count() == 1  # baseline worked
 
         inv_src2 = inventory.inventory_sources.create(
-            name='bar', overwrite=True
+            name='bar', overwrite=True, source='ec2'
         )
         os.environ['INVENTORY_SOURCE_ID'] = str(inv_src2.pk)
         os.environ['INVENTORY_UPDATE_ID'] = str(inv_src2.create_unified_job().pk)
@@ -568,7 +568,10 @@ def inventory_source_factory(inventory_factory):
 
 @pytest.fixture
 def inventory_update(inventory_source):
-    return InventoryUpdate.objects.create(inventory_source=inventory_source)
+    return InventoryUpdate.objects.create(
+        inventory_source=inventory_source,
+        source=inventory_source.source
+    )
 
 
 @pytest.fixture
@@ -197,9 +197,10 @@ class TestRelatedJobs:
         assert job.id in [jerb.id for jerb in group._get_related_jobs()]
 
     def test_related_group_update(self, group):
-        src = group.inventory_sources.create(name='foo')
+        src = group.inventory_sources.create(name='foo', source='ec2')
         job = InventoryUpdate.objects.create(
-            inventory_source=src
+            inventory_source=src,
+            source=src.source
         )
         assert job.id in [jerb.id for jerb in group._get_related_jobs()]
@@ -109,6 +109,7 @@ class TestJobNotificationMixin(object):
         kwargs = {}
         if JobClass is InventoryUpdate:
             kwargs['inventory_source'] = inventory_source
+            kwargs['source'] = inventory_source.source
         elif JobClass is ProjectUpdate:
             kwargs['project'] = project
@@ -325,16 +325,19 @@ def test_dst_phantom_hour(job_template):
 
 
 @pytest.mark.django_db
+@pytest.mark.timeout(3)
 def test_beginning_of_time(job_template):
     # ensure that really large generators don't have performance issues
+    start = now()
     rrule = 'DTSTART:19700101T000000Z RRULE:FREQ=MINUTELY;INTERVAL=1'
     s = Schedule(
         name='Some Schedule',
         rrule=rrule,
         unified_job_template=job_template
     )
-    with pytest.raises(ValueError):
-        s.save()
+    s.save()
+    assert s.next_run > start
+    assert (s.next_run - start).total_seconds() < 60
 
 
 @pytest.mark.django_db
@@ -297,7 +297,10 @@ class TestInstanceGroupOrdering:
         assert ad_hoc.preferred_instance_groups == [ig_inv, ig_org]
 
     def test_inventory_update_instance_groups(self, instance_group_factory, inventory_source, default_instance_group):
-        iu = InventoryUpdate.objects.create(inventory_source=inventory_source)
+        iu = InventoryUpdate.objects.create(
+            inventory_source=inventory_source,
+            source=inventory_source.source
+        )
         assert iu.preferred_instance_groups == [default_instance_group]
         ig_org = instance_group_factory("OrgIstGrp", [default_instance_group.instances.first()])
         ig_inv = instance_group_factory("InvIstGrp", [default_instance_group.instances.first()])
@@ -186,7 +186,11 @@ def test_group(get, admin_user):
 def test_inventory_source(get, admin_user):
     test_org = Organization.objects.create(name='test_org')
     test_inv = Inventory.objects.create(name='test_inv', organization=test_org)
-    test_source = InventorySource.objects.create(name='test_source', inventory=test_inv)
+    test_source = InventorySource.objects.create(
+        name='test_source',
+        inventory=test_inv,
+        source='ec2'
+    )
     url = reverse('api:inventory_source_detail', kwargs={'pk': test_source.pk})
     response = get(url, user=admin_user, expect=200)
     assert response.data['related']['named_url'].endswith('/test_source++test_inv++test_org/')
||||
@@ -90,7 +90,7 @@ def test_inherited_notification_templates(get, post, user, organization, project
         notification_templates.append(response.data['id'])
     i = Inventory.objects.create(name='test', organization=organization)
     i.save()
-    isrc = InventorySource.objects.create(name='test', inventory=i)
+    isrc = InventorySource.objects.create(name='test', inventory=i, source='ec2')
     isrc.save()
     jt = JobTemplate.objects.create(name='test', inventory=i, project=project, playbook='debug.yml')
     jt.save()

@@ -65,14 +65,29 @@ def test_job_template_access_read_level(jt_linked, rando):
     assert not access.can_unattach(jt_linked, cred, 'credentials', {})


+@pytest.mark.django_db
+def test_project_use_access(project, rando):
+    project.use_role.members.add(rando)
+    access = JobTemplateAccess(rando)
+    assert access.can_add(None)
+    assert access.can_add({'project': project.id, 'ask_inventory_on_launch': True})
+    project2 = Project.objects.create(
+        name='second-project', scm_type=project.scm_type, playbook_files=project.playbook_files,
+        organization=project.organization,
+    )
+    project2.use_role.members.add(rando)
+    jt = JobTemplate.objects.create(project=project, ask_inventory_on_launch=True)
+    jt.admin_role.members.add(rando)
+    assert access.can_change(jt, {'project': project2.pk})
+
+
 @pytest.mark.django_db
 def test_job_template_access_use_level(jt_linked, rando):
     access = JobTemplateAccess(rando)
     jt_linked.project.use_role.members.add(rando)
     jt_linked.inventory.use_role.members.add(rando)
     jt_linked.organization.job_template_admin_role.members.add(rando)
     jt_linked.admin_role.members.add(rando)
     proj_pk = jt_linked.project.pk
     org_pk = jt_linked.organization_id

     assert access.can_change(jt_linked, {'job_type': 'check', 'project': proj_pk})
     assert access.can_change(jt_linked, {'job_type': 'check', 'inventory': None})
@@ -80,8 +95,8 @@ def test_job_template_access_use_level(jt_linked, rando):
     for cred in jt_linked.credentials.all():
         assert access.can_unattach(jt_linked, cred, 'credentials', {})

-    assert access.can_add(dict(inventory=jt_linked.inventory.pk, project=proj_pk, organization=org_pk))
+    assert access.can_add(dict(project=proj_pk, organization=org_pk))
-    assert access.can_add(dict(inventory=jt_linked.inventory.pk, project=proj_pk))
+    assert access.can_add(dict(project=proj_pk))


 @pytest.mark.django_db

@@ -94,17 +109,16 @@ def test_job_template_access_admin(role_names, jt_linked, rando):
     assert not access.can_read(jt_linked)
     assert not access.can_delete(jt_linked)

-    # Appoint this user as admin of the organization
-    jt_linked.organization.admin_role.members.add(rando)
-    org_pk = jt_linked.organization.id
+    # Appoint this user to the org role
+    organization = jt_linked.organization
+    for role_name in role_names:
+        getattr(organization, role_name).members.add(rando)

     # Assign organization permission in the same way the create view does
     organization = jt_linked.inventory.organization
     ssh_cred.admin_role.parents.add(organization.admin_role)

     proj_pk = jt_linked.project.pk
-    assert access.can_add(dict(inventory=jt_linked.inventory.pk, project=proj_pk, organization=org_pk))
-    assert access.can_add(dict(credential=ssh_cred.pk, project=proj_pk, organization=org_pk))
+    assert access.can_add(dict(inventory=jt_linked.inventory.pk, project=proj_pk))

     for cred in jt_linked.credentials.all():
         assert access.can_unattach(jt_linked, cred, 'credentials', {})

@@ -170,12 +184,10 @@ class TestOrphanJobTemplate:

@pytest.mark.job_permissions
def test_job_template_creator_access(project, organization, rando, post):
    project.use_role.members.add(rando)
    organization.job_template_admin_role.members.add(rando)
    response = post(url=reverse('api:job_template_list'), data=dict(
        name='newly-created-jt',
        ask_inventory_on_launch=True,
        project=project.pk,
        organization=organization.id,
        playbook='helloworld.yml'
    ), user=rando, expect=201)

@@ -24,7 +24,11 @@ def test_implied_organization_subquery_inventory():
             inventory = Inventory.objects.create(name='foo{}'.format(i))
         else:
             inventory = Inventory.objects.create(name='foo{}'.format(i), organization=org)
-        inv_src = InventorySource.objects.create(name='foo{}'.format(i), inventory=inventory)
+        inv_src = InventorySource.objects.create(
+            name='foo{}'.format(i),
+            inventory=inventory,
+            source='ec2'
+        )
     sources = UnifiedJobTemplate.objects.annotate(
         test_field=rbac.implicit_org_subquery(UnifiedJobTemplate, InventorySource)
     )

@@ -60,6 +60,8 @@ def test_org_user_role_attach(user, organization, inventory):
     '''
     admin = user('admin')
     nonmember = user('nonmember')
+    other_org = Organization.objects.create(name="other_org")
+    other_org.member_role.members.add(nonmember)
     inventory.admin_role.members.add(nonmember)

     organization.admin_role.members.add(admin)

@@ -186,13 +188,17 @@ def test_need_all_orgs_to_admin_user(user):

 # Orphaned user can be added to member role, only in special cases
 @pytest.mark.django_db
-def test_orphaned_user_allowed(org_admin, rando, organization):
+def test_orphaned_user_allowed(org_admin, rando, organization, org_credential):
     '''
     We still allow adoption of orphaned* users by assigning them to
     organization member role, but only in the situation where the
     org admin already possesses indirect access to all of the user's roles
     *orphaned means user is not a member of any organization
     '''
+    # give a descendant role to rando, to trigger the conditional
+    # where all ancestor roles of rando should be in the set of
+    # org_admin roles.
+    org_credential.admin_role.members.add(rando)
     role_access = RoleAccess(org_admin)
     org_access = OrganizationAccess(org_admin)
     assert role_access.can_attach(organization.member_role, rando, 'members', None)

awx/main/tests/unit/api/test_logger.py (new file, 174 lines)
@@ -0,0 +1,174 @@
import pytest

from django.conf import settings

from awx.main.utils.external_logging import construct_rsyslog_conf_template
from awx.main.tests.functional.api.test_settings import _mock_logging_defaults

'''
# Example User Data
data_logstash = {
    "LOG_AGGREGATOR_TYPE": "logstash",
    "LOG_AGGREGATOR_HOST": "localhost",
    "LOG_AGGREGATOR_PORT": 8080,
    "LOG_AGGREGATOR_PROTOCOL": "tcp",
    "LOG_AGGREGATOR_USERNAME": "logger",
    "LOG_AGGREGATOR_PASSWORD": "mcstash"
}

data_netcat = {
    "LOG_AGGREGATOR_TYPE": "other",
    "LOG_AGGREGATOR_HOST": "localhost",
    "LOG_AGGREGATOR_PORT": 9000,
    "LOG_AGGREGATOR_PROTOCOL": "udp",
}

data_loggly = {
    "LOG_AGGREGATOR_TYPE": "loggly",
    "LOG_AGGREGATOR_HOST": "http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/",
    "LOG_AGGREGATOR_PORT": 8080,
    "LOG_AGGREGATOR_PROTOCOL": "https"
}
'''


# Test reconfigure logging settings function
# name this whatever you want
@pytest.mark.parametrize(
    'enabled, type, host, port, protocol, expected_config', [
        (
            True,
            'loggly',
            'http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/',
            None,
            'https',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")',  # noqa
            ])
        ),
        (
            True,  # localhost w/ custom UDP port
            'other',
            'localhost',
            9000,
            'udp',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")',
                'action(type="omfwd" target="localhost" port="9000" protocol="udp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")',  # noqa
            ])
        ),
        (
            True,  # localhost w/ custom TCP port
            'other',
            'localhost',
            9000,
            'tcp',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")',
                'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")',  # noqa
            ])
        ),
        (
            True,  # https, default port 443
            'splunk',
            'https://yoursplunk/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # http, default port 80
            'splunk',
            'http://yoursplunk/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # https, custom port in URL string
            'splunk',
            'https://yoursplunk:8088/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # https, custom port explicitly specified
            'splunk',
            'https://yoursplunk/services/collector/event',
            8088,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # no scheme specified in URL, default to https, respect custom port
            'splunk',
            'yoursplunk.org/services/collector/event',
            8088,
            'https',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # respect custom http-only port
            'splunk',
            'http://yoursplunk.org/services/collector/event',
            8088,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")',  # noqa
            ])
        ),
    ]
)
def test_rsyslog_conf_template(enabled, type, host, port, protocol, expected_config):

    mock_settings, _ = _mock_logging_defaults()

    # Set test settings
    logging_defaults = getattr(settings, 'LOGGING')
    setattr(mock_settings, 'LOGGING', logging_defaults)
    setattr(mock_settings, 'LOGGING["handlers"]["external_logger"]["address"]', '/var/run/awx-rsyslog/rsyslog.sock')
    setattr(mock_settings, 'LOG_AGGREGATOR_ENABLED', enabled)
    setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', type)
    setattr(mock_settings, 'LOG_AGGREGATOR_HOST', host)
    if port:
        setattr(mock_settings, 'LOG_AGGREGATOR_PORT', port)
    if protocol:
        setattr(mock_settings, 'LOG_AGGREGATOR_PROTOCOL', protocol)

    # create rsyslog conf template
    tmpl = construct_rsyslog_conf_template(mock_settings)

    # check validity of created template
    assert expected_config in tmpl


def test_splunk_auth():
    mock_settings, _ = _mock_logging_defaults()
    # Set test settings
    logging_defaults = getattr(settings, 'LOGGING')
    setattr(mock_settings, 'LOGGING', logging_defaults)
    setattr(mock_settings, 'LOG_AGGREGATOR_ENABLED', True)
    setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', 'splunk')
    setattr(mock_settings, 'LOG_AGGREGATOR_HOST', 'example.org')
    setattr(mock_settings, 'LOG_AGGREGATOR_PASSWORD', 'SECRET-TOKEN')

    tmpl = construct_rsyslog_conf_template(mock_settings)
    assert 'httpheaderkey="Authorization" httpheadervalue="Splunk SECRET-TOKEN"' in tmpl
@@ -18,7 +18,7 @@ def test_send_messages():
     requests_mock.post.assert_called_once_with(
         'https://example.com/api/annotations',
         headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
-        json={'tags': [], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': None},
+        json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': None},
         verify=True)
     assert sent_messages == 1

@@ -36,7 +36,7 @@ def test_send_messages_with_no_verify_ssl():
     requests_mock.post.assert_called_once_with(
         'https://example.com/api/annotations',
         headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
-        json={'tags': [], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None,'time': 60000, 'dashboardId': None},
+        json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None,'time': 60000, 'dashboardId': None},
         verify=False)
     assert sent_messages == 1

@@ -54,7 +54,7 @@ def test_send_messages_with_dashboardid():
     requests_mock.post.assert_called_once_with(
         'https://example.com/api/annotations',
         headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
-        json={'tags': [], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': 42},
+        json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': 42},
         verify=True)
     assert sent_messages == 1

@@ -72,7 +72,7 @@ def test_send_messages_with_panelid():
     requests_mock.post.assert_called_once_with(
         'https://example.com/api/annotations',
         headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
-        json={'tags': [], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': None},
+        json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': None},
         verify=True)
     assert sent_messages == 1

@@ -90,7 +90,7 @@ def test_send_messages_with_bothids():
     requests_mock.post.assert_called_once_with(
         'https://example.com/api/annotations',
         headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
-        json={'tags': [], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': 42},
+        json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': 42},
         verify=True)
     assert sent_messages == 1

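All five hunks above make the same change: the 'tags' key is dropped from the JSON annotation payload. For reference, a sketch of the request these tests pin down (the endpoint and token are the tests' example values, not real ones):

    import requests

    def send_grafana_annotation(text, start_ms, end_ms, api_key, verify=True):
        # mirrors the payload asserted above, minus the removed 'tags' key
        return requests.post(
            'https://example.com/api/annotations',
            headers={'Content-Type': 'application/json', 'Authorization': 'Bearer %s' % api_key},
            json={'text': text, 'isRegion': True, 'time': start_ms, 'timeEnd': end_ms,
                  'panelId': None, 'dashboardId': None},
            verify=verify,
        )
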
@@ -1,393 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import logging
|
||||
import socket
|
||||
import datetime
|
||||
from dateutil.tz import tzutc
|
||||
from io import StringIO
|
||||
from uuid import uuid4
|
||||
|
||||
from unittest import mock
|
||||
|
||||
from django.conf import LazySettings
|
||||
from django.utils.encoding import smart_str
|
||||
import pytest
|
||||
import requests
|
||||
from requests_futures.sessions import FuturesSession
|
||||
|
||||
from awx.main.utils.handlers import (BaseHandler, BaseHTTPSHandler as HTTPSHandler,
|
||||
TCPHandler, UDPHandler, _encode_payload_for_socket,
|
||||
PARAM_NAMES, LoggingConnectivityException,
|
||||
AWXProxyHandler)
|
||||
from awx.main.utils.formatters import LogstashFormatter
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def https_adapter():
|
||||
class FakeHTTPSAdapter(requests.adapters.HTTPAdapter):
|
||||
requests = []
|
||||
status = 200
|
||||
reason = None
|
||||
|
||||
def send(self, request, **kwargs):
|
||||
self.requests.append(request)
|
||||
resp = requests.models.Response()
|
||||
resp.status_code = self.status
|
||||
resp.reason = self.reason
|
||||
resp.request = request
|
||||
return resp
|
||||
|
||||
return FakeHTTPSAdapter()
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def connection_error_adapter():
|
||||
class ConnectionErrorAdapter(requests.adapters.HTTPAdapter):
|
||||
|
||||
def send(self, request, **kwargs):
|
||||
err = requests.packages.urllib3.exceptions.SSLError()
|
||||
raise requests.exceptions.ConnectionError(err, request=request)
|
||||
|
||||
return ConnectionErrorAdapter()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fake_socket(tmpdir_factory, request):
|
||||
sok = socket.socket
|
||||
sok.send = mock.MagicMock()
|
||||
sok.connect = mock.MagicMock()
|
||||
sok.setblocking = mock.MagicMock()
|
||||
sok.close = mock.MagicMock()
|
||||
return sok
|
||||
|
||||
|
||||
def test_https_logging_handler_requests_async_implementation():
|
||||
handler = HTTPSHandler()
|
||||
assert isinstance(handler.session, FuturesSession)
|
||||
|
||||
|
||||
def test_https_logging_handler_has_default_http_timeout():
|
||||
handler = TCPHandler()
|
||||
assert handler.tcp_timeout == 5
|
||||
|
||||
|
||||
@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
|
||||
def test_base_logging_handler_defaults(param):
|
||||
handler = BaseHandler()
|
||||
assert hasattr(handler, param) and getattr(handler, param) is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
|
||||
def test_base_logging_handler_kwargs(param):
|
||||
handler = BaseHandler(**{param: 'EXAMPLE'})
|
||||
assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('params', [
|
||||
{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_TYPE': 'loggly',
|
||||
'LOG_AGGREGATOR_USERNAME': 'foo',
|
||||
'LOG_AGGREGATOR_PASSWORD': 'bar',
|
||||
'LOG_AGGREGATOR_INDIVIDUAL_FACTS': True,
|
||||
'LOG_AGGREGATOR_TCP_TIMEOUT': 96,
|
||||
'LOG_AGGREGATOR_VERIFY_CERT': False,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'https'
|
||||
},
|
||||
{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'udp'
|
||||
}
|
||||
])
|
||||
def test_real_handler_from_django_settings(params):
|
||||
settings = LazySettings()
|
||||
settings.configure(**params)
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
# need the _reverse_ dictionary from PARAM_NAMES
|
||||
attr_lookup = {}
|
||||
for attr_name, setting_name in PARAM_NAMES.items():
|
||||
attr_lookup[setting_name] = attr_name
|
||||
for setting_name, val in params.items():
|
||||
attr_name = attr_lookup[setting_name]
|
||||
if attr_name == 'protocol':
|
||||
continue
|
||||
assert hasattr(handler, attr_name)
|
||||
|
||||
|
||||
def test_invalid_kwarg_to_real_handler():
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'udp',
|
||||
'LOG_AGGREGATOR_VERIFY_CERT': False # setting not valid for UDP handler
|
||||
})
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
assert not hasattr(handler, 'verify_cert')
|
||||
|
||||
|
||||
def test_protocol_not_specified():
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': None # awx/settings/defaults.py
|
||||
})
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
assert isinstance(handler, logging.NullHandler)
|
||||
|
||||
|
||||
def test_base_logging_handler_emit_system_tracking(dummy_log_record):
|
||||
handler = BaseHandler(host='127.0.0.1', indv_facts=True)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
dummy_log_record.name = 'awx.analytics.system_tracking'
|
||||
dummy_log_record.msg = None
|
||||
dummy_log_record.inventory_id = 11
|
||||
dummy_log_record.host_name = 'my_lucky_host'
|
||||
dummy_log_record.job_id = 777
|
||||
dummy_log_record.ansible_facts = {
|
||||
"ansible_kernel": "4.4.66-boot2docker",
|
||||
"ansible_machine": "x86_64",
|
||||
"ansible_swapfree_mb": 4663,
|
||||
}
|
||||
dummy_log_record.ansible_facts_modified = datetime.datetime.now(tzutc()).isoformat()
|
||||
sent_payloads = handler.emit(dummy_log_record)
|
||||
|
||||
assert len(sent_payloads) == 1
|
||||
assert sent_payloads[0]['ansible_facts'] == dummy_log_record.ansible_facts
|
||||
assert sent_payloads[0]['ansible_facts_modified'] == dummy_log_record.ansible_facts_modified
|
||||
assert sent_payloads[0]['level'] == 'INFO'
|
||||
assert sent_payloads[0]['logger_name'] == 'awx.analytics.system_tracking'
|
||||
assert sent_payloads[0]['job_id'] == dummy_log_record.job_id
|
||||
assert sent_payloads[0]['inventory_id'] == dummy_log_record.inventory_id
|
||||
assert sent_payloads[0]['host_name'] == dummy_log_record.host_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('host, port, normalized, hostname_only', [
|
||||
('http://localhost', None, 'http://localhost', False),
|
||||
('http://localhost', 8080, 'http://localhost:8080', False),
|
||||
('https://localhost', 443, 'https://localhost:443', False),
|
||||
('ftp://localhost', 443, 'ftp://localhost:443', False),
|
||||
('https://localhost:550', 443, 'https://localhost:550', False),
|
||||
('https://localhost:yoho/foobar', 443, 'https://localhost:443/foobar', False),
|
||||
('https://localhost:yoho/foobar', None, 'https://localhost:yoho/foobar', False),
|
||||
('http://splunk.server:8088/services/collector/event', 80,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('http://splunk.server/services/collector/event', 8088,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('splunk.server:8088/services/collector/event', 80,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('splunk.server/services/collector/event', 8088,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('localhost', None, 'http://localhost', False),
|
||||
('localhost', 8080, 'http://localhost:8080', False),
|
||||
('localhost', 4399, 'localhost', True),
|
||||
('tcp://localhost:4399/foo/bar', 4399, 'localhost', True),
|
||||
])
|
||||
def test_base_logging_handler_host_format(host, port, normalized, hostname_only):
|
||||
handler = BaseHandler(host=host, port=port)
|
||||
assert handler._get_host(scheme='http', hostname_only=hostname_only) == normalized
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'status, reason, exc',
|
||||
[(200, '200 OK', None), (404, 'Not Found', LoggingConnectivityException)]
|
||||
)
|
||||
@pytest.mark.parametrize('protocol', ['http', 'https', None])
|
||||
def test_https_logging_handler_connectivity_test(https_adapter, status, reason, exc, protocol):
|
||||
host = 'example.org'
|
||||
if protocol:
|
||||
host = '://'.join([protocol, host])
|
||||
https_adapter.status = status
|
||||
https_adapter.reason = reason
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': host,
|
||||
'LOG_AGGREGATOR_PORT': 8080,
|
||||
'LOG_AGGREGATOR_TYPE': 'logstash',
|
||||
'LOG_AGGREGATOR_USERNAME': 'user',
|
||||
'LOG_AGGREGATOR_PASSWORD': 'password',
|
||||
'LOG_AGGREGATOR_LOGGERS': ['awx', 'activity_stream', 'job_events', 'system_tracking'],
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'https',
|
||||
'CLUSTER_HOST_ID': '',
|
||||
'LOG_AGGREGATOR_TOWER_UUID': str(uuid4()),
|
||||
'LOG_AGGREGATOR_LEVEL': 'DEBUG',
|
||||
})
|
||||
|
||||
class FakeHTTPSHandler(HTTPSHandler):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FakeHTTPSHandler, self).__init__(*args, **kwargs)
|
||||
self.session.mount('{}://'.format(protocol or 'https'), https_adapter)
|
||||
|
||||
def emit(self, record):
|
||||
return super(FakeHTTPSHandler, self).emit(record)
|
||||
|
||||
with mock.patch.object(AWXProxyHandler, 'get_handler_class') as mock_get_class:
|
||||
mock_get_class.return_value = FakeHTTPSHandler
|
||||
if exc:
|
||||
with pytest.raises(exc) as e:
|
||||
AWXProxyHandler().perform_test(settings)
|
||||
assert str(e).endswith('%s: %s' % (status, reason))
|
||||
else:
|
||||
assert AWXProxyHandler().perform_test(settings) is None
|
||||
|
||||
|
||||
def test_https_logging_handler_logstash_auth_info():
|
||||
handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible')
|
||||
handler._add_auth_information()
|
||||
assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth)
|
||||
assert handler.session.auth.username == 'bob'
|
||||
assert handler.session.auth.password == 'ansible'
|
||||
|
||||
|
||||
def test_https_logging_handler_splunk_auth_info():
|
||||
handler = HTTPSHandler(message_type='splunk', password='ansible')
|
||||
handler._add_auth_information()
|
||||
assert handler.session.headers['Authorization'] == 'Splunk ansible'
|
||||
assert handler.session.headers['Content-Type'] == 'application/json'
|
||||
|
||||
|
||||
def test_https_logging_handler_connection_error(connection_error_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1', message_type='logstash')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('http://', connection_error_adapter)
|
||||
|
||||
buff = StringIO()
|
||||
logging.getLogger('awx.main.utils.handlers').addHandler(
|
||||
logging.StreamHandler(buff)
|
||||
)
|
||||
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
with pytest.raises(requests.exceptions.ConnectionError):
|
||||
[future.result() for future in async_futures]
|
||||
assert 'failed to emit log to external aggregator\nTraceback' in buff.getvalue()
|
||||
|
||||
# we should only log failures *periodically*, so causing *another*
|
||||
# immediate failure shouldn't report a second ConnectionError
|
||||
buff.truncate(0)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
with pytest.raises(requests.exceptions.ConnectionError):
|
||||
[future.result() for future in async_futures]
|
||||
assert buff.getvalue() == ''
|
||||
|
||||
|
||||
@pytest.mark.parametrize('message_type', ['logstash', 'splunk'])
|
||||
def test_https_logging_handler_emit_without_cred(https_adapter, dummy_log_record,
|
||||
message_type):
|
||||
handler = HTTPSHandler(host='127.0.0.1', message_type=message_type)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.url == 'https://127.0.0.1/'
|
||||
assert request.method == 'POST'
|
||||
|
||||
if message_type == 'logstash':
|
||||
# A username + password weren't used, so this header should be missing
|
||||
assert 'Authorization' not in request.headers
|
||||
|
||||
if message_type == 'splunk':
|
||||
assert request.headers['Authorization'] == 'Splunk None'
|
||||
|
||||
|
||||
def test_https_logging_handler_emit_logstash_with_creds(https_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1',
|
||||
username='user', password='pass',
|
||||
message_type='logstash')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.headers['Authorization'] == 'Basic %s' % smart_str(base64.b64encode(b"user:pass"))
|
||||
|
||||
|
||||
def test_https_logging_handler_emit_splunk_with_creds(https_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1',
|
||||
password='pass', message_type='splunk')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.headers['Authorization'] == 'Splunk pass'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('payload, encoded_payload', [
|
||||
('foobar', 'foobar'),
|
||||
({'foo': 'bar'}, '{"foo": "bar"}'),
|
||||
({u'测试键': u'测试值'}, '{"测试键": "测试值"}'),
|
||||
])
|
||||
def test_encode_payload_for_socket(payload, encoded_payload):
|
||||
assert _encode_payload_for_socket(payload).decode('utf-8') == encoded_payload
|
||||
|
||||
|
||||
def test_udp_handler_create_socket_at_init():
|
||||
handler = UDPHandler(host='127.0.0.1', port=4399)
|
||||
assert hasattr(handler, 'socket')
|
||||
assert isinstance(handler.socket, socket.socket)
|
||||
assert handler.socket.family == socket.AF_INET
|
||||
assert handler.socket.type == socket.SOCK_DGRAM
|
||||
|
||||
|
||||
def test_udp_handler_send(dummy_log_record):
|
||||
handler = UDPHandler(host='127.0.0.1', port=4399)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('awx.main.utils.handlers._encode_payload_for_socket', return_value="des") as encode_mock,\
|
||||
mock.patch.object(handler, 'socket') as socket_mock:
|
||||
handler.emit(dummy_log_record)
|
||||
encode_mock.assert_called_once_with(handler.format(dummy_log_record))
|
||||
socket_mock.sendto.assert_called_once_with("des", ('127.0.0.1', 4399))
|
||||
|
||||
|
||||
def test_tcp_handler_send(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [fake_socket], [])):
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
|
||||
fake_socket.setblocking.assert_called_once_with(0)
|
||||
fake_socket.send.assert_called_once_with(handler.format(dummy_log_record))
|
||||
fake_socket.close.assert_called_once()
|
||||
|
||||
|
||||
def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [], [])):
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
|
||||
fake_socket.setblocking.assert_called_once_with(0)
|
||||
assert not fake_socket.send.called
|
||||
fake_socket.close.assert_called_once()
|
||||
|
||||
|
||||
def test_tcp_handler_log_exception(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [], [])),\
|
||||
mock.patch('awx.main.utils.handlers.logger') as logger_mock:
|
||||
fake_socket.connect.side_effect = Exception("foo")
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
logger_mock.exception.assert_called_once()
|
||||
fake_socket.close.assert_called_once()
|
||||
assert not fake_socket.send.called
|
||||
@@ -8,7 +8,7 @@ def test_produce_supervisor_command(mocker):
     mock_process.communicate = communicate_mock
     Popen_mock = mocker.MagicMock(return_value=mock_process)
     with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
-        reload._supervisor_service_command("restart")
+        reload.supervisor_service_command("restart")
         reload.subprocess.Popen.assert_called_once_with(
             ['supervisorctl', 'restart', 'tower-processes:*',],
             stderr=-1, stdin=-1, stdout=-1)

@@ -64,6 +64,7 @@ def could_be_playbook(project_path, dir_path, filename):
                 matched = True
                 break
     except IOError:
+        logger.exception(f'failed to open {playbook_path}')
         return None
     if not matched:
         return None

@@ -56,6 +56,7 @@ __all__ = [
     'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
     'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
     'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
+    'StubLicense'
 ]

awx/main/utils/external_logging.py (new file, 113 lines)
@@ -0,0 +1,113 @@
import os

import urllib.parse as urlparse

from django.conf import settings

from awx.main.utils.reload import supervisor_service_command


def construct_rsyslog_conf_template(settings=settings):
    tmpl = ''
    parts = []
    enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED')
    host = getattr(settings, 'LOG_AGGREGATOR_HOST', '')
    port = getattr(settings, 'LOG_AGGREGATOR_PORT', '')
    protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', '')
    timeout = getattr(settings, 'LOG_AGGREGATOR_TCP_TIMEOUT', 5)
    max_disk_space = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_GB', 1)
    spool_directory = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH', '/var/lib/awx').rstrip('/')

    if not os.access(spool_directory, os.W_OK):
        spool_directory = '/var/lib/awx'

    max_bytes = settings.MAX_EVENT_RES_DATA
    parts.extend([
        '$WorkDirectory /var/lib/awx/rsyslog',
        f'$MaxMessageSize {max_bytes}',
        '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
        f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")',  # noqa
        'module(load="imuxsock" SysSock.Use="off")',
        'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on")',
        'template(name="awx" type="string" string="%rawmsg-after-pri%")',
    ])

    def escape_quotes(x):
        return x.replace('"', '\\"')

    if not enabled:
        parts.append('action(type="omfile" file="/dev/null")')  # rsyslog needs *at least* one valid action to start
        tmpl = '\n'.join(parts)
        return tmpl

    if protocol.startswith('http'):
        scheme = 'https'
        # urlparse requires '//' to be provided if scheme is not specified
        original_parsed = urlparse.urlsplit(host)
        if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
            host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
        parsed = urlparse.urlsplit(host)

        host = escape_quotes(parsed.hostname)
        try:
            if parsed.port:
                port = parsed.port
        except ValueError:
            port = settings.LOG_AGGREGATOR_PORT

        # https://github.com/rsyslog/rsyslog-doc/blob/master/source/configuration/modules/omhttp.rst
        ssl = 'on' if parsed.scheme == 'https' else 'off'
        skip_verify = 'off' if settings.LOG_AGGREGATOR_VERIFY_CERT else 'on'
        if not port:
            port = 443 if parsed.scheme == 'https' else 80

        params = [
            'type="omhttp"',
            f'server="{host}"',
            f'serverport="{port}"',
            f'usehttps="{ssl}"',
            f'skipverifyhost="{skip_verify}"',
            'action.resumeRetryCount="-1"',
            'template="awx"',
            'errorfile="/var/log/tower/rsyslog.err"',
            f'action.resumeInterval="{timeout}"'
        ]
        if parsed.path:
            path = urlparse.quote(parsed.path[1:])
            if parsed.query:
                path = f'{path}?{urlparse.quote(parsed.query)}'
            params.append(f'restpath="{path}"')
        username = escape_quotes(getattr(settings, 'LOG_AGGREGATOR_USERNAME', ''))
        password = escape_quotes(getattr(settings, 'LOG_AGGREGATOR_PASSWORD', ''))
        if getattr(settings, 'LOG_AGGREGATOR_TYPE', None) == 'splunk':
            # splunk has a weird authorization header <shrug>
            if password:
                # from omhttp docs:
                # https://www.rsyslog.com/doc/v8-stable/configuration/modules/omhttp.html
                # > Currently only a single additional header/key pair is
                # > configurable, further development is needed to support
                # > arbitrary header key/value lists.
                params.append('httpheaderkey="Authorization"')
                params.append(f'httpheadervalue="Splunk {password}"')
        elif username:
            params.append(f'uid="{username}"')
            if password:
                # you can only have a basic auth password if there's a username
                params.append(f'pwd="{password}"')
        params = ' '.join(params)
        parts.extend(['module(load="omhttp")', f'action({params})'])
    elif protocol and host and port:
        parts.append(
            f'action(type="omfwd" target="{host}" port="{port}" protocol="{protocol}" action.resumeRetryCount="-1" action.resumeInterval="{timeout}" template="awx")'  # noqa
        )
    else:
        parts.append('action(type="omfile" file="/dev/null")')  # rsyslog needs *at least* one valid action to start
    tmpl = '\n'.join(parts)
    return tmpl


def reconfigure_rsyslog():
    tmpl = construct_rsyslog_conf_template()
    with open('/var/lib/awx/rsyslog/rsyslog.conf', 'w') as f:
        f.write(tmpl + '\n')
    supervisor_service_command(command='restart', service='awx-rsyslogd')
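One detail worth isolating from construct_rsyslog_conf_template above: urlsplit() only recognizes a hostname when the string carries a scheme or a leading '//', which is why the code prepends one before parsing. A standalone stdlib sketch of that normalization:

    import urllib.parse as urlparse

    def split_host(host, scheme='https'):
        # prepend a scheme if urlsplit would otherwise see no hostname
        parsed = urlparse.urlsplit(host)
        if (not parsed.scheme and not host.startswith('//')) or parsed.hostname is None:
            parsed = urlparse.urlsplit('%s://%s' % (scheme, host))
        return parsed.hostname, parsed.port, parsed.path

    print(split_host('yoursplunk.org/services/collector/event'))
    # -> ('yoursplunk.org', None, '/services/collector/event')
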
@@ -3,13 +3,13 @@

 from copy import copy
 import json
-import time
 import logging
 import traceback
 import socket
 from datetime import datetime

 from dateutil.tz import tzutc
+from django.core.serializers.json import DjangoJSONEncoder
 from django.conf import settings

@@ -91,18 +91,13 @@ class LogstashFormatterBase(logging.Formatter):
             'processName': record.processName,
         }

-    @classmethod
-    def format_timestamp(cls, time):
-        tstamp = datetime.utcfromtimestamp(time)
-        return tstamp.strftime("%Y-%m-%dT%H:%M:%S") + ".%03d" % (tstamp.microsecond / 1000) + "Z"
-
     @classmethod
     def format_exception(cls, exc_info):
         return ''.join(traceback.format_exception(*exc_info)) if exc_info else ''

     @classmethod
     def serialize(cls, message):
-        return bytes(json.dumps(message), 'utf-8')
+        return json.dumps(message, cls=DjangoJSONEncoder) + '\n'


 class LogstashFormatter(LogstashFormatterBase):
@@ -157,9 +152,6 @@ class LogstashFormatter(LogstashFormatterBase):

         try:
             data_for_log[key] = getattr(job_event, fd)
-            if fd in ['created', 'modified'] and data_for_log[key] is not None:
-                time_float = time.mktime(data_for_log[key].timetuple())
-                data_for_log[key] = self.format_timestamp(time_float)
         except Exception as e:
             data_for_log[key] = 'Exception `{}` producing field'.format(e)

@@ -231,10 +223,12 @@ class LogstashFormatter(LogstashFormatterBase):
         return fields

     def format(self, record):
+        stamp = datetime.utcfromtimestamp(record.created)
+        stamp = stamp.replace(tzinfo=tzutc())
         message = {
             # Field not included, but exist in related logs
             # 'path': record.pathname
-            '@timestamp': self.format_timestamp(record.created),
+            '@timestamp': stamp,
             'message': record.getMessage(),
             'host': self.host,

@@ -250,4 +244,7 @@ class LogstashFormatter(LogstashFormatterBase):
         if record.exc_info:
             message.update(self.get_debug_fields(record))

+        if settings.LOG_AGGREGATOR_TYPE == 'splunk':
+            # splunk messages must have a top level "event" key
+            message = {'event': message}
         return self.serialize(message)

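The net effect of the formatter changes, in isolation: serialization now goes through DjangoJSONEncoder (which knows how to render datetimes), and splunk payloads get wrapped under a top-level "event" key. A standalone sketch of that behavior, not the actual formatter class:

    import json
    from datetime import datetime, timezone
    from django.core.serializers.json import DjangoJSONEncoder

    message = {'@timestamp': datetime.now(timezone.utc), 'message': 'job started'}
    message = {'event': message}  # applied only when LOG_AGGREGATOR_TYPE == 'splunk'
    print(json.dumps(message, cls=DjangoJSONEncoder))
    # -> {"event": {"@timestamp": "...", "message": "job started"}}
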
@@ -3,404 +3,29 @@
|
||||
|
||||
# Python
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import time
|
||||
import threading
|
||||
import socket
|
||||
import select
|
||||
from urllib import parse as urlparse
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from requests.exceptions import RequestException
|
||||
import os.path
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
|
||||
# requests futures, a dependency used by these handlers
|
||||
from requests_futures.sessions import FuturesSession
|
||||
import cachetools
|
||||
|
||||
# AWX
|
||||
from awx.main.utils.formatters import LogstashFormatter
|
||||
class RSysLogHandler(logging.handlers.SysLogHandler):
|
||||
|
||||
append_nul = False
|
||||
|
||||
__all__ = ['BaseHTTPSHandler', 'TCPHandler', 'UDPHandler',
|
||||
'AWXProxyHandler']
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.utils.handlers')
|
||||
|
||||
|
||||
# Translation of parameter names to names in Django settings
|
||||
# logging settings category, only those related to handler / log emission
|
||||
PARAM_NAMES = {
|
||||
'host': 'LOG_AGGREGATOR_HOST',
|
||||
'port': 'LOG_AGGREGATOR_PORT',
|
||||
'message_type': 'LOG_AGGREGATOR_TYPE',
|
||||
'username': 'LOG_AGGREGATOR_USERNAME',
|
||||
'password': 'LOG_AGGREGATOR_PASSWORD',
|
||||
'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS',
|
||||
'tcp_timeout': 'LOG_AGGREGATOR_TCP_TIMEOUT',
|
||||
'verify_cert': 'LOG_AGGREGATOR_VERIFY_CERT',
|
||||
'protocol': 'LOG_AGGREGATOR_PROTOCOL'
|
||||
}
|
||||
|
||||
|
||||
def unused_callback(sess, resp):
|
||||
pass
|
||||
|
||||
|
||||
class LoggingConnectivityException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class VerboseThreadPoolExecutor(ThreadPoolExecutor):
|
||||
|
||||
last_log_emit = 0
|
||||
|
||||
def submit(self, func, *args, **kwargs):
|
||||
def _wrapped(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
# If an exception occurs in a concurrent thread worker (like
|
||||
# a ConnectionError or a read timeout), periodically log
|
||||
# that failure.
|
||||
#
|
||||
# This approach isn't really thread-safe, so we could
|
||||
# potentially log once per thread every 10 seconds, but it
|
||||
# beats logging *every* failed HTTP request in a scenario where
|
||||
# you've typo'd your log aggregator hostname.
|
||||
now = time.time()
|
||||
if now - self.last_log_emit > 10:
|
||||
logger.exception('failed to emit log to external aggregator')
|
||||
self.last_log_emit = now
|
||||
raise
|
||||
return super(VerboseThreadPoolExecutor, self).submit(_wrapped, *args,
|
||||
**kwargs)
|
||||
|
||||
|
||||
class SocketResult:
|
||||
'''
|
||||
A class to be the return type of methods that send data over a socket
|
||||
allows object to be used in the same way as a request futures object
|
||||
'''
|
||||
def __init__(self, ok, reason=None):
|
||||
self.ok = ok
|
||||
self.reason = reason
|
||||
|
||||
def result(self):
|
||||
return self
|
||||
|
||||
|
||||
class BaseHandler(logging.Handler):
|
||||
def __init__(self, host=None, port=None, indv_facts=None, **kwargs):
|
||||
super(BaseHandler, self).__init__()
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.indv_facts = indv_facts
|
||||
|
||||
def _send(self, payload):
|
||||
"""Actually send message to log aggregator.
|
||||
"""
|
||||
return payload
|
||||
|
||||
def _format_and_send_record(self, record):
|
||||
if self.indv_facts:
|
||||
return [self._send(json.loads(self.format(record)))]
|
||||
return [self._send(self.format(record))]
|
||||
|
||||
def emit(self, record):
|
||||
"""
|
||||
Emit a log record. Returns a list of zero or more
|
||||
implementation-specific objects for tests.
|
||||
"""
|
||||
def emit(self, msg):
|
||||
if not settings.LOG_AGGREGATOR_ENABLED:
|
||||
return
|
||||
if not os.path.exists(settings.LOGGING['handlers']['external_logger']['address']):
|
||||
return
|
||||
try:
|
||||
return self._format_and_send_record(record)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
def _get_host(self, scheme='', hostname_only=False):
|
||||
"""Return the host name of log aggregator.
|
||||
"""
|
||||
host = self.host or ''
|
||||
# urlparse requires '//' to be provided if scheme is not specified
|
||||
original_parsed = urlparse.urlsplit(host)
|
||||
if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
|
||||
host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
|
||||
parsed = urlparse.urlsplit(host)
|
||||
|
||||
if hostname_only:
|
||||
return parsed.hostname
|
||||
|
||||
try:
|
||||
port = parsed.port or self.port
|
||||
except ValueError:
|
||||
port = self.port
|
||||
netloc = parsed.netloc if port is None else '%s:%s' % (parsed.hostname, port)
|
||||
|
||||
url_components = list(parsed)
|
||||
url_components[1] = netloc
|
||||
ret = urlparse.urlunsplit(url_components)
|
||||
return ret.lstrip('/')
|
||||
|
||||
|
||||
class BaseHTTPSHandler(BaseHandler):
|
||||
'''
|
||||
Originally derived from python-logstash library
|
||||
Non-blocking request accomplished by FuturesSession, similar
|
||||
to the loggly-python-handler library
|
||||
'''
|
||||
def _add_auth_information(self):
|
||||
if self.message_type == 'logstash':
|
||||
if not self.username:
|
||||
# Logstash authentication not enabled
|
||||
return
|
||||
logstash_auth = requests.auth.HTTPBasicAuth(self.username, self.password)
|
||||
self.session.auth = logstash_auth
|
||||
elif self.message_type == 'splunk':
|
||||
auth_header = "Splunk %s" % self.password
|
||||
headers = {
|
||||
"Authorization": auth_header,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
self.session.headers.update(headers)
|
||||
|
||||
def __init__(self, fqdn=False, message_type=None, username=None, password=None,
|
||||
tcp_timeout=5, verify_cert=True, **kwargs):
|
||||
self.fqdn = fqdn
|
||||
self.message_type = message_type
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.tcp_timeout = tcp_timeout
|
||||
self.verify_cert = verify_cert
|
||||
super(BaseHTTPSHandler, self).__init__(**kwargs)
|
||||
self.session = FuturesSession(executor=VerboseThreadPoolExecutor(
|
||||
max_workers=2 # this is the default used by requests_futures
|
||||
))
|
||||
self._add_auth_information()
|
||||
|
||||
def _get_post_kwargs(self, payload_input):
|
||||
if self.message_type == 'splunk':
|
||||
# Splunk needs data nested under key "event"
|
||||
if not isinstance(payload_input, dict):
|
||||
payload_input = json.loads(payload_input)
|
||||
payload_input = {'event': payload_input}
|
||||
if isinstance(payload_input, dict):
|
||||
payload_str = json.dumps(payload_input)
|
||||
else:
|
||||
payload_str = payload_input
|
||||
kwargs = dict(data=payload_str, background_callback=unused_callback,
|
||||
timeout=self.tcp_timeout)
|
||||
if self.verify_cert is False:
|
||||
kwargs['verify'] = False
|
||||
return kwargs
|
||||
|
||||
|
||||
def _send(self, payload):
|
||||
"""See:
|
||||
https://docs.python.org/3/library/concurrent.futures.html#future-objects
|
||||
http://pythonhosted.org/futures/
|
||||
"""
|
||||
return self.session.post(self._get_host(scheme='https'),
|
||||
**self._get_post_kwargs(payload))
|
||||
|
||||
|
||||
def _encode_payload_for_socket(payload):
|
||||
encoded_payload = payload
|
||||
if isinstance(encoded_payload, dict):
|
||||
encoded_payload = json.dumps(encoded_payload, ensure_ascii=False)
|
||||
if isinstance(encoded_payload, str):
|
||||
encoded_payload = encoded_payload.encode('utf-8')
|
||||
return encoded_payload
|
||||
|
||||
|
||||
class TCPHandler(BaseHandler):
|
||||
def __init__(self, tcp_timeout=5, **kwargs):
|
||||
self.tcp_timeout = tcp_timeout
|
||||
super(TCPHandler, self).__init__(**kwargs)
|
||||
|
||||
def _send(self, payload):
|
||||
payload = _encode_payload_for_socket(payload)
|
||||
sok = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
try:
|
||||
sok.connect((self._get_host(hostname_only=True), self.port or 0))
|
||||
sok.setblocking(0)
|
||||
_, ready_to_send, _ = select.select([], [sok], [], float(self.tcp_timeout))
|
||||
if len(ready_to_send) == 0:
|
||||
ret = SocketResult(False, "Socket currently busy, failed to send message")
|
||||
logger.warning(ret.reason)
|
||||
else:
|
||||
sok.send(payload)
|
||||
ret = SocketResult(True) # success!
|
||||
except Exception as e:
|
||||
ret = SocketResult(False, "Error sending message from %s: %s" %
|
||||
(TCPHandler.__name__,
|
||||
' '.join(str(arg) for arg in e.args)))
|
||||
logger.exception(ret.reason)
|
||||
finally:
|
||||
sok.close()
|
||||
return ret
|
||||
|
||||
|
||||
class UDPHandler(BaseHandler):
|
||||
message = "Cannot determine if UDP messages are received."
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(UDPHandler, self).__init__(**kwargs)
|
||||
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
|
||||
def _send(self, payload):
|
||||
payload = _encode_payload_for_socket(payload)
|
||||
self.socket.sendto(payload, (self._get_host(hostname_only=True), self.port or 0))
|
||||
return SocketResult(True, reason=self.message)
|
||||
|
||||
|
||||
class AWXNullHandler(logging.NullHandler):
|
||||
'''
|
||||
The only addition here is accepting arbitrary __init__ params, because
|
||||
the proxy handler does not (yet) work with arbitrary handler classes
|
||||
'''
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AWXNullHandler, self).__init__()
|
||||
|
||||
|
||||
HANDLER_MAPPING = {
|
||||
'https': BaseHTTPSHandler,
|
||||
'tcp': TCPHandler,
|
||||
'udp': UDPHandler,
|
||||
}
|
||||
|
||||
|
||||
TTLCache = cachetools.TTLCache
|
||||
|
||||
if 'py.test' in os.environ.get('_', ''):
|
||||
# don't cache settings in unit tests
|
||||
class TTLCache(TTLCache):
|
||||
|
||||
def __getitem__(self, item):
|
||||
raise KeyError()
|
||||
|
||||
|
||||
class AWXProxyHandler(logging.Handler):
|
||||
'''
|
||||
Handler specific to the AWX external logging feature
|
||||
|
||||
Will dynamically create a handler specific to the configured
|
||||
protocol, and will create a new one automatically on setting change
|
||||
|
||||
Managing parameters:
|
||||
All parameters will get their value from settings as a default
|
||||
if the parameter was either provided on init, or set manually,
|
||||
this value will take precedence.
|
||||
Parameters match same parameters in the actualized handler classes.
|
||||
'''
|
||||
|
||||
thread_local = threading.local()
|
||||
_auditor = None
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# TODO: process 'level' kwarg
|
||||
super(AWXProxyHandler, self).__init__(**kwargs)
|
||||
self._handler = None
|
||||
self._old_kwargs = {}
|
||||
|
||||
@property
|
||||
def auditor(self):
|
||||
if not self._auditor:
|
||||
self._auditor = logging.handlers.RotatingFileHandler(
|
||||
filename='/var/log/tower/external.log',
|
||||
maxBytes=1024 * 1024 * 50, # 50 MB
|
||||
backupCount=5,
|
||||
)
|
||||
|
||||
class WritableLogstashFormatter(LogstashFormatter):
|
||||
@classmethod
|
||||
def serialize(cls, message):
|
||||
return json.dumps(message)
|
||||
|
||||
self._auditor.setFormatter(WritableLogstashFormatter())
|
||||
return self._auditor
|
||||
|
||||
def get_handler_class(self, protocol):
|
||||
return HANDLER_MAPPING.get(protocol, AWXNullHandler)
|
||||
|
||||
@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'get_handler')
|
||||
def get_handler(self, custom_settings=None, force_create=False):
|
||||
new_kwargs = {}
|
||||
use_settings = custom_settings or settings
|
||||
for field_name, setting_name in PARAM_NAMES.items():
|
||||
val = getattr(use_settings, setting_name, None)
|
||||
if val is None:
|
||||
continue
|
||||
new_kwargs[field_name] = val
|
||||
if new_kwargs == self._old_kwargs and self._handler and (not force_create):
|
||||
# avoids re-creating session objects, and other such things
|
||||
return self._handler
|
||||
self._old_kwargs = new_kwargs.copy()
|
||||
# TODO: remove any kwargs not applicable to that particular handler
|
||||
protocol = new_kwargs.pop('protocol', None)
|
||||
HandlerClass = self.get_handler_class(protocol)
|
||||
# cleanup old handler and make new one
|
||||
if self._handler:
|
||||
self._handler.close()
|
||||
logger.debug('Creating external log handler due to startup or settings change.')
|
||||
self._handler = HandlerClass(**new_kwargs)
|
||||
if self.formatter:
|
||||
# self.format(record) is called inside of emit method
|
||||
# so not safe to assume this can be handled within self
|
||||
self._handler.setFormatter(self.formatter)
|
||||
return self._handler
|
||||
|
||||
@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'should_audit')
|
||||
def should_audit(self):
|
||||
return settings.LOG_AGGREGATOR_AUDIT
|
||||
|
||||
def emit(self, record):
|
||||
if AWXProxyHandler.thread_local.enabled:
|
||||
actual_handler = self.get_handler()
|
||||
if self.should_audit():
|
||||
self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
|
||||
self.auditor.emit(record)
|
||||
return actual_handler.emit(record)
|
||||
|
||||
def perform_test(self, custom_settings):
|
||||
"""
|
||||
Tests logging connectivity for given settings module.
|
||||
@raises LoggingConnectivityException
|
||||
"""
|
||||
handler = self.get_handler(custom_settings=custom_settings, force_create=True)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
logger = logging.getLogger(__file__)
|
||||
fn, lno, func, _ = logger.findCaller()
|
||||
record = logger.makeRecord('awx', 10, fn, lno,
|
||||
'AWX Connection Test', tuple(),
|
||||
None, func)
|
||||
futures = handler.emit(record)
|
||||
for future in futures:
|
||||
try:
|
||||
resp = future.result()
|
||||
if not resp.ok:
|
||||
if isinstance(resp, SocketResult):
|
||||
raise LoggingConnectivityException(
|
||||
'Socket error: {}'.format(resp.reason or '')
|
||||
)
|
||||
else:
|
||||
raise LoggingConnectivityException(
|
||||
': '.join([str(resp.status_code), resp.reason or ''])
|
||||
)
|
||||
except RequestException as e:
|
||||
raise LoggingConnectivityException(str(e))
|
||||
|
||||
@classmethod
|
||||
def disable(cls):
|
||||
cls.thread_local.enabled = False
|
||||
|
||||
|
||||
AWXProxyHandler.thread_local.enabled = True
|
||||
return super(RSysLogHandler, self).emit(msg)
|
||||
except ConnectionRefusedError:
|
||||
# rsyslogd has gone to lunch; this generally means that it's just
|
||||
# been restarted (due to a configuration change)
|
||||
# unfortunately, we can't log that because...rsyslogd is down (and
|
||||
# would just us back ddown this code path)
|
||||
pass
|
||||
|
||||
|
||||
ColorHandler = logging.StreamHandler
|
||||
|
||||
@@ -4,25 +4,24 @@
# Python
import subprocess
import logging
import os

# Django
from django.conf import settings

logger = logging.getLogger('awx.main.utils.reload')


def _supervisor_service_command(command, communicate=True):
def supervisor_service_command(command, service='*', communicate=True):
    '''
    example use pattern of supervisorctl:
    # supervisorctl restart tower-processes:receiver tower-processes:factcacher
    '''
    group_name = 'tower-processes'
    if settings.DEBUG:
        group_name = 'awx-processes'
    args = ['supervisorctl']
    if settings.DEBUG:
        args.extend(['-c', '/supervisor.conf'])
    args.extend([command, '{}:*'.format(group_name)])

    supervisor_config_path = os.getenv('SUPERVISOR_WEB_CONFIG_PATH', None)
    if supervisor_config_path:
        args.extend(['-c', supervisor_config_path])

    args.extend([command, ':'.join(['tower-processes', service])])
    logger.debug('Issuing command to {} services, args={}'.format(command, args))
    supervisor_process = subprocess.Popen(args, stdin=subprocess.PIPE,
                                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -30,15 +29,16 @@ def _supervisor_service_command(command, communicate=True):
        restart_stdout, restart_err = supervisor_process.communicate()
        restart_code = supervisor_process.returncode
        if restart_code or restart_err:
            logger.error('supervisorctl {} errored with exit code `{}`, stdout:\n{}stderr:\n{}'.format(
                command, restart_code, restart_stdout.strip(), restart_err.strip()))
            logger.error('supervisorctl {} {} errored with exit code `{}`, stdout:\n{}stderr:\n{}'.format(
                command, service, restart_code, restart_stdout.strip(), restart_err.strip()))
        else:
            logger.info('supervisorctl {} finished, stdout:\n{}'.format(
                command, restart_stdout.strip()))
            logger.debug(
                'supervisorctl {} {} succeeded'.format(command, service)
            )
    else:
        logger.info('Submitted supervisorctl {} command, not waiting for result'.format(command))


def stop_local_services(communicate=True):
    logger.warn('Stopping services on this node in response to user action')
    _supervisor_service_command(command='stop', communicate=communicate)
    supervisor_service_command(command='stop', communicate=communicate)

@@ -70,7 +70,7 @@ class WebsocketTask():

    async def connect(self, attempt):
        from awx.main.consumers import WebsocketSecretAuthHelper  # noqa
        logger.debug(f"{self.name} connect attempt {attempt} to {self.remote_host}")
        logger.debug(f"Connection from {self.name} to {self.remote_host} attempt number {attempt}.")

        '''
        Can not put get_channel_layer() in the init code because it is in the init
@@ -83,7 +83,7 @@ class WebsocketTask():
            if attempt > 0:
                await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS)
        except asyncio.CancelledError:
            logger.warn(f"{self.name} connection to {self.remote_host} cancelled")
            logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled")
            raise

        uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/"
@@ -93,29 +93,29 @@ class WebsocketTask():
        try:
            async with aiohttp.ClientSession(headers={'secret': secret_val},
                                             timeout=timeout) as session:
                async with session.ws_connect(uri, ssl=self.verify_ssl) as websocket:
                async with session.ws_connect(uri, ssl=self.verify_ssl, heartbeat=20) as websocket:
                    logger.info(f"Connection from {self.name} to {self.remote_host} established.")
                    self.stats.record_connection_established()
                    attempt = 0
                    await self.run_loop(websocket)
        except asyncio.CancelledError:
            # TODO: Check if connected and disconnect
            # Possibly use run_until_complete() if disconnect is async
            logger.warn(f"{self.name} connection to {self.remote_host} cancelled")
            logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled.")
            self.stats.record_connection_lost()
            raise
        except client_exceptions.ClientConnectorError as e:
            logger.warn(f"Failed to connect to {self.remote_host}: '{e}'. Reconnecting ...")
            self.stats.record_connection_lost()
            self.start(attempt=attempt + 1)
            logger.warn(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.")
        except asyncio.TimeoutError:
            logger.warn(f"Timeout while trying to connect to {self.remote_host}. Reconnecting ...")
            self.stats.record_connection_lost()
            self.start(attempt=attempt + 1)
            logger.warn(f"Connection from {self.name} to {self.remote_host} timed out.")
        except Exception as e:
            # Early on, this is our canary. I'm not sure what exceptions we can really encounter.
            logger.warn(f"Websocket broadcast client exception {type(e)} {e}")
            self.stats.record_connection_lost()
            self.start(attempt=attempt + 1)
            logger.warn(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.")
        else:
            logger.warn(f"Connection from {self.name} to {self.remote_host} lost.")

        self.stats.record_connection_lost()
        self.start(attempt=attempt + 1)

    def start(self, attempt=0):
        self.async_task = self.event_loop.create_task(self.connect(attempt=attempt))
@@ -163,9 +163,9 @@ class BroadcastWebsocketManager(object):
        new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)

        if deleted_remote_hosts:
            logger.warn(f"{self.local_hostname} going to remove {deleted_remote_hosts} from the websocket broadcast list")
            logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list")
        if new_remote_hosts:
            logger.warn(f"{self.local_hostname} going to add {new_remote_hosts} to the websocket broadcast list")
            logger.warn(f"Adding {new_remote_hosts} to websocket broadcast list")

        for h in deleted_remote_hosts:
            self.broadcast_tasks[h].cancel()

@@ -38,6 +38,7 @@
    recursive: true
    set_remote_user: false
    rsync_opts:
      - "--blocking-io"
      - "--rsh=$RSH"
  environment:
    RSH: "oc rsh --config={{ ansible_kubectl_config }}"
@@ -51,6 +52,7 @@
    mode: pull
    set_remote_user: false
    rsync_opts:
      - "--blocking-io"
      - "--rsh=$RSH"
  environment:
    RSH: "oc rsh --config={{ ansible_kubectl_config }}"

@@ -144,6 +144,7 @@
  changed_when: "'was installed successfully' in galaxy_result.stdout"
  environment:
    ANSIBLE_FORCE_COLOR: false
    GIT_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no"

  when: roles_enabled|bool
  tags:
@@ -165,6 +166,7 @@
  environment:
    ANSIBLE_FORCE_COLOR: false
    ANSIBLE_COLLECTIONS_PATHS: "{{ collections_destination }}"
    GIT_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no"

  when:
    - "ansible_version.full is version_compare('2.8', '>=')"

@@ -25,6 +25,7 @@
    dest: "{{ dest }}"
    set_remote_user: false
    rsync_opts:
      - "--blocking-io"
      - "--rsh=$RSH"
  environment:
    RSH: "oc rsh --config={{ ansible_kubectl_config }}"

@@ -405,17 +405,6 @@ AWX_ISOLATED_PERIODIC_CHECK = 600
# Verbosity level for isolated node management tasks
AWX_ISOLATED_VERBOSITY = 0

# Memcached django cache configuration
# CACHES = {
#     'default': {
#         'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#         'LOCATION': '127.0.0.1:11211',
#         'TIMEOUT': 864000,
#         'KEY_PREFIX': 'tower_dev',
#     }
# }


DEVSERVER_DEFAULT_ADDR = '0.0.0.0'
DEVSERVER_DEFAULT_PORT = '8013'

@@ -458,7 +447,7 @@ CELERYBEAT_SCHEDULE = {
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': 'memcached:11211',
        'LOCATION': 'unix:/var/run/memcached/memcached.sock'
    },
}

@@ -947,6 +936,8 @@ LOG_AGGREGATOR_ENABLED = False
LOG_AGGREGATOR_TCP_TIMEOUT = 5
LOG_AGGREGATOR_VERIFY_CERT = True
LOG_AGGREGATOR_LEVEL = 'INFO'
LOG_AGGREGATOR_MAX_DISK_USAGE_GB = 1
LOG_AGGREGATOR_MAX_DISK_USAGE_PATH = '/var/lib/awx'

# The number of retry attempts for websocket session establishment
# If you're encountering issues establishing websockets in clustered Tower,
@@ -1022,8 +1013,9 @@ LOGGING = {
        'formatter': 'simple',
    },
    'external_logger': {
        'class': 'awx.main.utils.handlers.AWXProxyHandler',
        'class': 'awx.main.utils.handlers.RSysLogHandler',
        'formatter': 'json',
        'address': '/var/run/awx-rsyslog/rsyslog.sock',
        'filters': ['external_log_enabled', 'dynamic_level_filter'],
    },
    'tower_warnings': {
@@ -1053,6 +1045,15 @@ LOGGING = {
        'backupCount': 5,
        'formatter': 'dispatcher',
    },
    'wsbroadcast': {
        # don't define a level here, it's set by settings.LOG_AGGREGATOR_LEVEL
        'class': 'logging.handlers.RotatingFileHandler',
        'filters': ['require_debug_false', 'dynamic_level_filter'],
        'filename': os.path.join(LOG_ROOT, 'wsbroadcast.log'),
        'maxBytes': 1024 * 1024 * 5,  # 5 MB
        'backupCount': 5,
        'formatter': 'simple',
    },
    'celery.beat': {
        'class': 'logging.StreamHandler',
        'level': 'ERROR'
@@ -1107,9 +1108,9 @@ LOGGING = {
        'handlers': ['console', 'file', 'tower_warnings'],
        'level': 'WARNING',
    },
    'celery': {  # for celerybeat connection warnings
    'daphne': {
        'handlers': ['console', 'file', 'tower_warnings'],
        'level': 'WARNING',
        'level': 'INFO',
    },
    'rest_framework.request': {
        'handlers': ['console', 'file', 'tower_warnings'],
@@ -1140,6 +1141,13 @@ LOGGING = {
    'awx.main.dispatch': {
        'handlers': ['dispatcher'],
    },
    'awx.main.consumers': {
        'handlers': ['console', 'file', 'tower_warnings'],
        'level': 'INFO',
    },
    'awx.main.wsbroadcast': {
        'handlers': ['wsbroadcast'],
    },
    'awx.isolated.manager.playbooks': {
        'handlers': ['management_playbooks'],
        'propagate': False
@@ -1229,7 +1237,6 @@ MIDDLEWARE = [
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'awx.main.middleware.ActivityStreamMiddleware',
    'awx.sso.middleware.SocialAuthMiddleware',
    'crum.CurrentRequestUserMiddleware',
    'awx.main.middleware.URLModificationMiddleware',

@@ -40,7 +40,6 @@ NOTEBOOK_ARGUMENTS = [
]

# print SQL queries in shell_plus
SHELL_PLUS = 'ipython'
SHELL_PLUS_PRINT_SQL = False

# show colored logs in the dev environment

@@ -55,6 +55,7 @@ AWX_ISOLATED_USERNAME = 'awx'
LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log'  # noqa
LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log'  # noqa
LOGGING['handlers']['dispatcher']['filename'] = '/var/log/tower/dispatcher.log'  # noqa
LOGGING['handlers']['wsbroadcast']['filename'] = '/var/log/tower/wsbroadcast.log'  # noqa
LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log'  # noqa
LOGGING['handlers']['management_playbooks']['filename'] = '/var/log/tower/management_playbooks.log'  # noqa
LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log'  # noqa

@@ -29,7 +29,7 @@
            <div class="at-Row-container">
                <at-row-item
                    header-value="{{ approval.summary_fields.source_workflow_job.name }}"
                    header-value="{{ approval.summary_fields.source_workflow_job.id }} - {{ approval.summary_fields.source_workflow_job.name }}"
                    header-state="workflowResults({id: {{approval.summary_fields.source_workflow_job.id}}})">
                </at-row-item>
            </div>

@@ -92,6 +92,7 @@ export default [
        var populateFromApi = function() {
            SettingsService.getCurrentValues()
                .then(function(data) {
                    $scope.logAggregatorEnabled = data.LOG_AGGREGATOR_ENABLED;
                    // these two values need to be unnested from the
                    // OAUTH2_PROVIDER key
                    data.ACCESS_TOKEN_EXPIRE_SECONDS = data
@@ -538,8 +539,11 @@ export default [
            var payload = {};
            payload[key] = $scope[key];
            SettingsService.patchConfiguration(payload)
                .then(function() {
                .then(function(data) {
                    //TODO consider updating form values with returned data here
                    if (key === 'LOG_AGGREGATOR_ENABLED') {
                        $scope.logAggregatorEnabled = data.LOG_AGGREGATOR_ENABLED;
                    }
                })
                .catch(function(data) {
                    //Change back on unsuccessful update

@@ -17,7 +17,7 @@ export default [
    'ProcessErrors',
    'ngToast',
    '$filter',
    function(
    function (
        $rootScope, $scope, $stateParams,
        systemActivityStreamForm,
        systemLoggingForm,
@@ -41,8 +41,8 @@ export default [
        formTracker.setCurrentSystem(activeSystemForm);
    }

    var activeForm = function(tab) {
        if(!_.get($scope.$parent, [formTracker.currentFormName(), '$dirty'])) {
    var activeForm = function (tab) {
        if (!_.get($scope.$parent, [formTracker.currentFormName(), '$dirty'])) {
            systemVm.activeSystemForm = tab;
            formTracker.setCurrentSystem(systemVm.activeSystemForm);
        } else {
@@ -52,7 +52,7 @@ export default [
                label: i18n._('Discard changes'),
                "class": "btn Form-cancelButton",
                "id": "formmodal-cancel-button",
                onClick: function() {
                onClick: function () {
                    $scope.$parent.vm.populateFromApi();
                    $scope.$parent[formTracker.currentFormName()].$setPristine();
                    systemVm.activeSystemForm = tab;
@@ -61,15 +61,15 @@ export default [
                }
            }, {
                label: i18n._('Save changes'),
                onClick: function() {
                onClick: function () {
                    $scope.$parent.vm.formSave()
                        .then(function() {
                            $scope.$parent[formTracker.currentFormName()].$setPristine();
                            $scope.$parent.vm.populateFromApi();
                            systemVm.activeSystemForm = tab;
                            formTracker.setCurrentSystem(systemVm.activeSystemForm);
                            $('#FormModal-dialog').dialog('close');
                        });
                        .then(function () {
                            $scope.$parent[formTracker.currentFormName()].$setPristine();
                            $scope.$parent.vm.populateFromApi();
                            systemVm.activeSystemForm = tab;
                            formTracker.setCurrentSystem(systemVm.activeSystemForm);
                            $('#FormModal-dialog').dialog('close');
                        });
                },
                "class": "btn btn-primary",
                "id": "formmodal-save-button"
@@ -80,9 +80,9 @@ export default [
    };

    var dropdownOptions = [
        {label: i18n._('Misc. System'), value: 'misc'},
        {label: i18n._('Activity Stream'), value: 'activity_stream'},
        {label: i18n._('Logging'), value: 'logging'},
        { label: i18n._('Misc. System'), value: 'misc' },
        { label: i18n._('Activity Stream'), value: 'activity_stream' },
        { label: i18n._('Logging'), value: 'logging' },
    ];

    var systemForms = [{
@@ -97,14 +97,14 @@ export default [
    }];

    var forms = _.map(systemForms, 'formDef');
    _.each(forms, function(form) {
    _.each(forms, function (form) {
        var keys = _.keys(form.fields);
        _.each(keys, function(key) {
            if($scope.configDataResolve[key].type === 'choice') {
        _.each(keys, function (key) {
            if ($scope.configDataResolve[key].type === 'choice') {
                // Create options for dropdowns
                var optionsGroup = key + '_options';
                $scope.$parent.$parent[optionsGroup] = [];
                _.each($scope.configDataResolve[key].choices, function(choice){
                _.each($scope.configDataResolve[key].choices, function (choice) {
                    $scope.$parent.$parent[optionsGroup].push({
                        name: choice[0],
                        label: choice[1],
@@ -121,7 +121,7 @@ export default [
    function addFieldInfo(form, key) {
        _.extend(form.fields[key], {
            awPopOver: ($scope.configDataResolve[key].defined_in_file) ?
                null: $scope.configDataResolve[key].help_text,
                null : $scope.configDataResolve[key].help_text,
            label: $scope.configDataResolve[key].label,
            name: key,
            toggleSource: key,
@@ -138,7 +138,7 @@ export default [

    $scope.$parent.$parent.parseType = 'json';

    _.each(systemForms, function(form) {
    _.each(systemForms, function (form) {
        generator.inject(form.formDef, {
            id: form.id,
            mode: 'edit',
@@ -150,37 +150,37 @@ export default [

    var dropdownRendered = false;

    $scope.$on('populated', function() {
    $scope.$on('populated', function () {
        populateLogAggregator(false);
    });

    $scope.$on('LOG_AGGREGATOR_TYPE_populated', function(e, data, flag) {
    $scope.$on('LOG_AGGREGATOR_TYPE_populated', function (e, data, flag) {
        populateLogAggregator(flag);
    });

    $scope.$on('LOG_AGGREGATOR_PROTOCOL_populated', function(e, data, flag) {
    $scope.$on('LOG_AGGREGATOR_PROTOCOL_populated', function (e, data, flag) {
        populateLogAggregator(flag);
    });

    function populateLogAggregator(flag){
    function populateLogAggregator(flag) {

        if($scope.$parent.$parent.LOG_AGGREGATOR_TYPE !== null) {
        if ($scope.$parent.$parent.LOG_AGGREGATOR_TYPE !== null) {
            $scope.$parent.$parent.LOG_AGGREGATOR_TYPE = _.find($scope.$parent.$parent.LOG_AGGREGATOR_TYPE_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_TYPE });
        }

        if($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL !== null) {
        if ($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL !== null) {
            $scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL = _.find($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL });
        }

        if($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL !== null) {
        if ($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL !== null) {
            $scope.$parent.$parent.LOG_AGGREGATOR_LEVEL = _.find($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_LEVEL });
        }

        if(flag !== undefined){
        if (flag !== undefined) {
            dropdownRendered = flag;
        }

        if(!dropdownRendered) {
        if (!dropdownRendered) {
            dropdownRendered = true;
            CreateSelect2({
                element: '#configuration_logging_template_LOG_AGGREGATOR_TYPE',
@@ -193,33 +193,52 @@ export default [
        }
    }

    $scope.$parent.vm.testLogging = function() {
        Rest.setUrl("/api/v2/settings/logging/test/");
        Rest.post($scope.$parent.vm.getFormPayload())
            .then(() => {
                ngToast.success({
                    content: `<i class="fa fa-check-circle
                        Toast-successIcon"></i>` +
                        i18n._('Log aggregator test successful.')
                });
            })
            .catch(({data, status}) => {
                if (status === 500) {
                    ngToast.danger({
                        content: '<i class="fa fa-exclamation-triangle Toast-successIcon"></i>' +
                            i18n._('Log aggregator test failed. <br> Detail: ') + $filter('sanitize')(data.error),
                        additionalClasses: "LogAggregator-failedNotification"
    $scope.$watchGroup(['configuration_logging_template_form.$pending', 'configuration_logging_template_form.$dirty', '!logAggregatorEnabled'], (vals) => {
        if (vals.some(val => val === true)) {
            $scope.$parent.vm.disableTestButton = true;
            $scope.$parent.vm.testTooltip = i18n._('Save and enable log aggregation before testing the log aggregator.');
        } else {
            $scope.$parent.vm.disableTestButton = false;
            $scope.$parent.vm.testTooltip = i18n._('Send a test log message to the configured log aggregator.');
        }
    });

    $scope.$parent.vm.testLogging = function () {
        if (!$scope.$parent.vm.disableTestButton) {
            $scope.$parent.vm.disableTestButton = true;
            Rest.setUrl("/api/v2/settings/logging/test/");
            Rest.post({})
                .then(() => {
                    $scope.$parent.vm.disableTestButton = false;
                    ngToast.success({
                        dismissButton: false,
                        dismissOnTimeout: true,
                        content: `<i class="fa fa-check-circle
                            Toast-successIcon"></i>` +
                            i18n._('Log aggregator test sent successfully.')
                    });
                } else {
                    ProcessErrors($scope, data, status, null,
                        {
                            hdr: i18n._('Error!'),
                            msg: i18n._('There was an error testing the ' +
                                'log aggregator. Returned status: ') +
                                status
                        })
                .catch(({ data, status }) => {
                    $scope.$parent.vm.disableTestButton = false;
                    if (status === 400 || status === 500) {
                        ngToast.danger({
                            dismissButton: false,
                            dismissOnTimeout: true,
                            content: '<i class="fa fa-exclamation-triangle Toast-successIcon"></i>' +
                                i18n._('Log aggregator test failed. <br> Detail: ') + $filter('sanitize')(data.error),
                            additionalClasses: "LogAggregator-failedNotification"
                        });
                    }
                });
        } else {
            ProcessErrors($scope, data, status, null,
                {
                    hdr: i18n._('Error!'),
                    msg: i18n._('There was an error testing the ' +
                        'log aggregator. Returned status: ') +
                        status
                });
        }
    });
        }
    };

    angular.extend(systemVm, {

@@ -75,10 +75,13 @@
        class: 'Form-resetAll'
    },
    testLogging: {
        ngClass: "{'Form-button--disabled': vm.disableTestButton}",
        ngClick: 'vm.testLogging()',
        label: i18n._('Test'),
        class: 'btn-primary',
        ngDisabled: 'configuration_logging_template_form.$invalid'
        class: 'Form-primaryButton',
        awToolTip: '{{vm.testTooltip}}',
        dataTipWatch: 'vm.testTooltip',
        dataPlacement: 'top',
    },
    cancel: {
        ngClick: 'vm.formCancel()',

@@ -215,8 +215,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
    dataTitle: i18n._("Source Variables"),
    dataPlacement: 'right',
    awPopOver: "<p>" + i18n._("Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables ") +
        "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.ini\" target=\"_blank\">" +
        i18n._("view ec2.ini in the Ansible github repo.") + "</a></p>" +
        "<a href=\"https://github.com/ansible-collections/community.aws/blob/master/scripts/inventory/ec2.ini\" target=\"_blank\">" +
        i18n._("view ec2.ini in the community.aws repo.") + "</a></p>" +
        "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
        i18n._("JSON:") + "<br />\n" +
        "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
@@ -239,8 +239,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
    dataTitle: i18n._("Source Variables"),
    dataPlacement: 'right',
    awPopOver: "<p>" + i18n._("Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables ") +
        "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/vmware_inventory.ini\" target=\"_blank\">" +
        i18n._("view vmware_inventory.ini in the Ansible github repo.") + "</a></p>" +
        "<a href=\"https://github.com/ansible-collections/vmware/blob/master/scripts/inventory/vmware_inventory.ini\" target=\"_blank\">" +
        i18n._("view vmware_inventory.ini in the vmware community repo.") + "</a></p>" +
        "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
        i18n._("JSON:") + "<br />\n" +
        "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
@@ -314,8 +314,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
    dataTitle: i18n._("Source Variables"),
    dataPlacement: 'right',
    awPopOver: "<p>" + i18n._("Override variables found in azure_rm.ini and used by the inventory update script. For a detailed description of these variables ") +
        "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/azure_rm.ini\" target=\"_blank\">" +
        i18n._("view azure_rm.ini in the Ansible github repo.") + "</a></p>" +
        "<a href=\"https://github.com/ansible-collections/community.general/blob/master/scripts/inventory/azure_rm.ini\" target=\"_blank\">" +
        i18n._("view azure_rm.ini in the Ansible community.general github repo.") + "</a></p>" +
        "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
        i18n._("JSON:") + "<br />\n" +
        "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +

@@ -657,10 +657,10 @@ function(SettingsUtils, i18n, $rootScope) {
            query += '&credential_type__namespace=ssh&role_level=use_role';
            break;
        case 'scm_credential':
            query += '&redential_type__namespace=scm&role_level=use_role';
            query += '&credential_type__namespace=scm&role_level=use_role';
            break;
        case 'network_credential':
            query += '&redential_type__namespace=net&role_level=use_role';
            query += '&credential_type__namespace=net&role_level=use_role';
            break;
        case 'cloud_credential':
            query += '&cloud=true&role_level=use_role';

@@ -1690,6 +1690,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
            if (button.ngClick) {
                html += this.attr(button, 'ngClick');
            }
            if (button.ngClass) {
                html += this.attr(button, 'ngClass');
            }
            if (button.ngDisabled) {
                ngDisabled = (button.ngDisabled === true) ? `${this.form.name}_form.$invalid || ${this.form.name}_form.$pending` : button.ngDisabled;
                if (btn !== 'reset') {

@@ -34,9 +34,9 @@
        <div class="List-tableHeader--info col-md-4" base-path="unified_job_templates" collection="wf_maker_templates" dataset="wf_maker_template_dataset" column-sort="" column-field="info" column-iterator="wf_maker_template" column-no-sort="true" column-label="" column-custom-class="" query-set="wf_maker_template_queryset"></div>
    </div>
</div>
<div ng-class="[template.success_class, {'List-tableRow--selected' : $stateParams['template_id'] == wf_maker_template.id}, {'List-tableRow--disabled': !wf_maker_template.summary_fields.user_capabilities.edit}]" id="{{ wf_maker_template.id }}" class="List-lookupLayout List-tableRow template_class" disable-row="{{ !wf_maker_template.summary_fields.user_capabilities.edit }}" ng-repeat="wf_maker_template in wf_maker_templates">
<div ng-class="[template.success_class, {'List-tableRow--selected' : $stateParams['template_id'] == wf_maker_template.id}, {'List-tableRow--disabled': !wf_maker_template.summary_fields.user_capabilities.start}]" id="{{ wf_maker_template.id }}" class="List-lookupLayout List-tableRow template_class" disable-row="{{ !wf_maker_template.summary_fields.user_capabilities.start }}" ng-repeat="wf_maker_template in wf_maker_templates">
    <div class="List-centerEnd select-column">
        <input type="radio" ng-model="wf_maker_template.checked" ng-value="1" ng-false-value="0" name="check_template_{{wf_maker_template.id}}" ng-click="selectTemplate(wf_maker_template)" ng-disabled="!wf_maker_template.summary_fields.user_capabilities.edit">
        <input type="radio" ng-model="wf_maker_template.checked" ng-value="1" ng-false-value="0" name="check_template_{{wf_maker_template.id}}" ng-click="selectTemplate(wf_maker_template)" ng-disabled="!wf_maker_template.summary_fields.user_capabilities.start">
    </div>
    <div class="d-flex h-100">
        <div class="List-tableCell name-column col-md-8" ng-click="selectTemplate(wf_maker_template)">

@@ -14,6 +14,7 @@ Have questions about this document or anything not covered here? Feel free to re
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [AWX REST API Interaction](#awx-rest-api-interaction)
* [Handling API Errors](#handling-api-errors)
* [Forms](#forms)
* [Working with React](#working-with-react)
  * [App structure](#app-structure)
  * [Naming files](#naming-files)
@@ -30,6 +31,10 @@ Have questions about this document or anything not covered here? Feel free to re
- All code submissions are done through pull requests against the `devel` branch.
- If collaborating with someone else on the same branch, please use `--force-with-lease` instead of `--force` when pushing up code. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
- We use a [code formatter](https://prettier.io/). Before adding a new commit or opening a PR, please apply the formatter using `npm run prettier`
- We adopt the following code style guide:
  - functions should adopt camelCase
  - constructors/classes should adopt PascalCase
  - constants to be exported should adopt UPPERCASE

## Setting up your development environment

@@ -76,7 +81,7 @@ Note that mixins can be chained. See the example below.

Example of a model using multiple mixins:

```
```javascript
import NotificationsMixin from '../mixins/Notifications.mixin';
import InstanceGroupsMixin from '../mixins/InstanceGroups.mixin';

@@ -91,7 +96,7 @@ export default Organizations;

Example of mocking a specific method for every test in a suite:

```
```javascript
import { OrganizationsAPI } from '../../../../src/api';

// Mocks out all available methods. Comparable to:
@@ -124,6 +129,9 @@ API requests can and will fail occasionally so they should include explicit erro

- **other errors** - Most errors will fall into the first two categories, but for miscellaneous actions like toggling notifications, deleting a list item, etc. we display an alert modal to notify the user that their requested action couldn't be performed. A sketch of this case follows.
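
For illustration, a minimal sketch of that last case. It assumes the `OrganizationsAPI` used elsewhere in this document; the `AlertModal` component, its import path, and the component name are hypothetical, not the repo's actual ones:

```javascript
import React, { useState } from 'react';
import { OrganizationsAPI } from '../../../../src/api';
import AlertModal from './AlertModal'; // hypothetical component

function OrganizationDeleteButton({ id }) {
  const [deletionError, setDeletionError] = useState(null);

  const handleDelete = async () => {
    try {
      await OrganizationsAPI.destroy(id);
    } catch (error) {
      // surface the failure in an alert modal rather than failing silently
      setDeletionError(error);
    }
  };

  return (
    <>
      <button type="button" onClick={handleDelete}>Delete</button>
      {deletionError && (
        <AlertModal title="Error!" onClose={() => setDeletionError(null)}>
          The organization could not be deleted.
        </AlertModal>
      )}
    </>
  );
}

export default OrganizationDeleteButton;
```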

## Forms
Our forms should have a known, consistent, and fully-resolved starting state before it is possible for a user (keyboard and mouse, screen reader, or automated test) to interact with them. If multiple network calls are needed to populate a form, resolve them all before displaying the form or showing a content error. When multiple requests are needed to create or update the resources represented by a form, resolve them all before transitioning the UI to a success or failure state. A sketch of the populate side follows.
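
A minimal sketch of the populate pattern, assuming hypothetical `fetchOrganization` and `fetchCredentials` helpers standing in for real API calls:

```javascript
import React, { useEffect, useState } from 'react';

// hypothetical fetch helpers, standing in for real API calls
const fetchOrganization = (id) =>
  fetch(`/api/v2/organizations/${id}/`).then((r) => r.json());
const fetchCredentials = (id) =>
  fetch(`/api/v2/organizations/${id}/credentials/`).then((r) => r.json());

function OrganizationEditForm({ id }) {
  const [formData, setFormData] = useState(null);
  const [contentError, setContentError] = useState(null);

  useEffect(() => {
    let isMounted = true;
    // resolve every request before the form is shown, so the form mounts
    // with a fully-resolved starting state
    Promise.all([fetchOrganization(id), fetchCredentials(id)])
      .then(([organization, credentials]) => {
        if (isMounted) setFormData({ organization, credentials });
      })
      .catch((err) => {
        if (isMounted) setContentError(err);
      });
    return () => {
      isMounted = false;
    };
  }, [id]);

  if (contentError) return <p>Failed to load form data.</p>;
  if (!formData) return null; // nothing interactive until everything resolves

  return <form>{/* render fields from formData */}</form>;
}

export default OrganizationEditForm;
```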

## Working with React

### App structure
@@ -164,7 +172,7 @@ Ideally, files should be named the same as the component they export, and tests

**File naming** - Since contexts export both consumer and provider (and potentially in withContext function form), the file can be simplified to be named after the consumer export. In other words, the file containing the `Network` context components would be named `Network.jsx`.

**Component naming and conventions** - In order to provide a consistent interface with react-router and lingui, as well as make their usage easier and less verbose, context components follow these conventions:
**Component naming and conventions** - In order to provide a consistent interface with react-router and [lingui](https://lingui.js.org/), as well as make their usage easier and less verbose, context components follow these conventions:
- Providers are wrapped in a component in the `FooProvider` format.
- The value prop of the provider should be pulled from state. This is recommended by the react docs, [here](https://reactjs.org/docs/context.html#caveats).
- The provider should also be able to accept its value by prop for testing (see the sketch after this list).
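
A minimal sketch of these conventions, using a hypothetical `Network` context: the provider pulls its value from state, but can also accept one by prop so tests can stub it.

```javascript
import React from 'react';

const NetworkContext = React.createContext({});

class NetworkProvider extends React.Component {
  constructor(props) {
    super(props);
    // the value lives in state, per the react docs caveat linked above
    this.state = { handleHttpError: props.handleHttpError || (() => {}) };
  }

  render() {
    // a value passed in by prop (e.g. from a test) wins over state
    const value = this.props.value || this.state;
    return (
      <NetworkContext.Provider value={value}>
        {this.props.children}
      </NetworkContext.Provider>
    );
  }
}

export { NetworkContext, NetworkProvider };
```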
@@ -262,7 +270,7 @@ We have several React contexts that wrap much of the app, including those from r

If you want to stub the value of a context, or assert actions taken on it, you can customize a context's value by passing a second parameter to `mountWithContexts`. For example, this provides a custom value for the `Config` context:

```
```javascript
const config = {
  custom_virtualenvs: ['foo', 'bar'],
};
@@ -301,7 +309,7 @@ The lingui library provides various React helpers for dealing with both marking

**Note:** We try to avoid the `I18n` consumer, `i18nMark` function, or `<Trans>` component lingui gives us access to in this repo. i18nMark does not actually replace the string in the UI (leading to the potential for untranslated bugs), and the other helpers are redundant. Settling on a consistent, single pattern helps us ease the mental overhead of needing to understand the ins and outs of the lingui API.

You can learn more about the ways to use lingui and its React helpers at [this link](https://lingui.js.org/tutorials/react-patterns.html).

### Setting up .po files to give to translation team


@@ -11,7 +11,7 @@
* npm start
* visit `https://127.0.0.1:3001/`

**note:** These instructions assume you have the [awx](https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md#running-the-environment) development api server up and running at `localhost:8043`. You can use a different backend server with the `TAGET_HOST` and `TARGET_PORT` environment variables when starting the development server:
**note:** These instructions assume you have the [awx](https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md#running-the-environment) development api server up and running at `localhost:8043`. You can use a different backend server with the `TARGET_HOST` and `TARGET_PORT` environment variables when starting the development server:

```shell
# use a non-default host and port when starting the development server

@@ -2,7 +2,7 @@

## UX Considerations

Historically, the code that powers search in the AngularJS version of the AWX/Tower UI is very complex and prone to bugs. In order to reduce that complexity, we've made some UX desicions to help make the code easier to maintain.
Historically, the code that powers search in the AngularJS version of the AWX/Tower UI is very complex and prone to bugs. In order to reduce that complexity, we've made some UX decisions to help make the code easier to maintain.

**ALL query params namespaced and in url bar**

643 awx/ui_next/package-lock.json (generated)
@@ -4910,6 +4910,201 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"@jest/transform": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@jest/transform/-/transform-25.1.0.tgz",
|
||||
"integrity": "sha512-4ktrQ2TPREVeM+KxB4zskAT84SnmG1vaz4S+51aTefyqn3zocZUnliLLm5Fsl85I3p/kFPN4CRp1RElIfXGegQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/core": "^7.1.0",
|
||||
"@jest/types": "^25.1.0",
|
||||
"babel-plugin-istanbul": "^6.0.0",
|
||||
"chalk": "^3.0.0",
|
||||
"convert-source-map": "^1.4.0",
|
||||
"fast-json-stable-stringify": "^2.0.0",
|
||||
"graceful-fs": "^4.2.3",
|
||||
"jest-haste-map": "^25.1.0",
|
||||
"jest-regex-util": "^25.1.0",
|
||||
"jest-util": "^25.1.0",
|
||||
"micromatch": "^4.0.2",
|
||||
"pirates": "^4.0.1",
|
||||
"realpath-native": "^1.1.0",
|
||||
"slash": "^3.0.0",
|
||||
"source-map": "^0.6.1",
|
||||
"write-file-atomic": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-styles": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"braces": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
|
||||
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fill-range": "^7.0.1"
|
||||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
|
||||
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"graceful-fs": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
|
||||
"integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==",
|
||||
"dev": true
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"dev": true
|
||||
},
|
||||
"is-number": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
|
||||
"dev": true
|
||||
},
|
||||
"jest-regex-util": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-25.1.0.tgz",
|
||||
"integrity": "sha512-9lShaDmDpqwg+xAd73zHydKrBbbrIi08Kk9YryBEBybQFg/lBWR/2BDjjiSE7KIppM9C5+c03XiDaZ+m4Pgs1w==",
|
||||
"dev": true
|
||||
},
|
||||
"micromatch": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz",
|
||||
"integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"braces": "^3.0.1",
|
||||
"picomatch": "^2.0.5"
|
||||
}
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"to-regex-range": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
||||
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-number": "^7.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@jest/types": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@jest/types/-/types-25.1.0.tgz",
|
||||
"integrity": "sha512-VpOtt7tCrgvamWZh1reVsGADujKigBUFTi19mlRjqEGsE8qH4r3s+skY33dNdXOwyZIvuftZ5tqdF1IgsMejMA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/istanbul-lib-coverage": "^2.0.0",
|
||||
"@types/istanbul-reports": "^1.1.1",
|
||||
"@types/yargs": "^15.0.0",
|
||||
"chalk": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-styles": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
|
||||
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"dev": true
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@lingui/babel-plugin-extract-messages": {
|
||||
"version": "2.7.4",
|
||||
"resolved": "https://registry.npmjs.org/@lingui/babel-plugin-extract-messages/-/babel-plugin-extract-messages-2.7.4.tgz",
|
||||
@@ -5096,51 +5291,50 @@
|
||||
"dev": true
|
||||
},
|
||||
"@patternfly/patternfly": {
|
||||
"version": "2.66.0",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-2.66.0.tgz",
|
||||
"integrity": "sha512-fZMr2q9LZhVtKAEcDJ4rzcCGC6iN93mEQPoLlv2T9td5Hba1bLw8Bpgp5fdTm95Fv/++AY0PsdUPZUzh1cx7Sg=="
|
||||
"version": "2.71.3",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-2.71.3.tgz",
|
||||
"integrity": "sha512-uTb9zAtPjTKB8aHmWdavEOrSMs+NL9XovMvWYL9R74zXbGnEMHEpibn7cNSu469u2JrxY6VsH7x44aOfdZpqpg=="
|
||||
},
|
||||
"@patternfly/react-core": {
|
||||
"version": "3.140.11",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-3.140.11.tgz",
|
||||
"integrity": "sha512-841DeN5BTuUS02JfVXAAVJYtWY0HWc4ewqMD32Xog2MAR/pn74jzjnQOSQr4LUyVrH5QufB68SK4Alm2+IUzSw==",
|
||||
"version": "3.153.3",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-3.153.3.tgz",
|
||||
"integrity": "sha512-2ccnn/HPfEhZfj9gfKZJpWgzOA9O6QeCHjZGh41tx7Lz7iZGl9b/UdTmDsQUeYYuJ+0M8fxhYnQMKaDxfcqyOQ==",
|
||||
"requires": {
|
||||
"@patternfly/react-icons": "^3.15.3",
|
||||
"@patternfly/react-styles": "^3.7.4",
|
||||
"@patternfly/react-tokens": "^2.8.4",
|
||||
"emotion": "^9.2.9",
|
||||
"exenv": "^1.2.2",
|
||||
"focus-trap-react": "^4.0.1",
|
||||
"@patternfly/react-icons": "^3.15.15",
|
||||
"@patternfly/react-styles": "^3.7.12",
|
||||
"@patternfly/react-tokens": "^2.8.12",
|
||||
"focus-trap": "4.0.2",
|
||||
"react-dropzone": "9.0.0",
|
||||
"tippy.js": "5.1.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@patternfly/react-icons": {
|
||||
"version": "3.15.4",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-3.15.4.tgz",
|
||||
"integrity": "sha512-tOVirISoZDIn0bWYFctGN9B7Q8wQ19FaK4XIUD2sgIDRBzDbe9JWuqdef7ogJFF78eQnZNsWOci6nhvVCVF/zA==",
|
||||
"version": "3.15.15",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-3.15.15.tgz",
|
||||
"integrity": "sha512-oYOgY7fELe3gKbKB2KRUANpYPWkKkEGpmKdmXonNmNUlg0t/a8V68raVX8bTjXN9pwKsUKqNQW1R+xFibtt0Aw==",
|
||||
"requires": {
|
||||
"@fortawesome/free-brands-svg-icons": "^5.8.1"
|
||||
}
|
||||
},
|
||||
"@patternfly/react-tokens": {
|
||||
"version": "2.8.4",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-2.8.4.tgz",
|
||||
"integrity": "sha512-GlLyutls0bG39Nwl/sv2FUkicwyRNrXQFso+e7Y4470+VOUtSsVSdQz+rTjgPxQ38olKPsSZdtEjqN9o2PbDiw=="
|
||||
"version": "2.8.12",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-2.8.12.tgz",
|
||||
"integrity": "sha512-QyuMaTizuSn9eESl6bcopGKKgFydocc/N8T7OGB6jARBt6gdIoQWcztdBabSIVz/YGoEDw6lKeoNfed8p6GynA=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"@patternfly/react-icons": {
|
||||
"version": "3.15.4",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-3.15.4.tgz",
|
||||
"integrity": "sha512-tOVirISoZDIn0bWYFctGN9B7Q8wQ19FaK4XIUD2sgIDRBzDbe9JWuqdef7ogJFF78eQnZNsWOci6nhvVCVF/zA==",
|
||||
"version": "3.15.15",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-3.15.15.tgz",
|
||||
"integrity": "sha512-oYOgY7fELe3gKbKB2KRUANpYPWkKkEGpmKdmXonNmNUlg0t/a8V68raVX8bTjXN9pwKsUKqNQW1R+xFibtt0Aw==",
|
||||
"requires": {
|
||||
"@fortawesome/free-brands-svg-icons": "^5.8.1"
|
||||
}
|
||||
},
|
||||
"@patternfly/react-styles": {
|
||||
"version": "3.7.4",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-3.7.4.tgz",
|
||||
"integrity": "sha512-D+wu0OIfWVgxWNShQhTK9cadw+KdMCoBYR8gbWjV9Q1aCsCEV/aL/x1nMyyaUQ3c2dqizHhujDG4z9jUZCmCcw==",
|
||||
"version": "3.7.12",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-3.7.12.tgz",
|
||||
"integrity": "sha512-vTKyC78oKlrS6VTQ3GPYevc17qgxj2Ono+SCDwoMyhUexPEyXRuZHLoZA1/MkJHvSCqJHGBageBAFcRq5wb0XQ==",
|
||||
"requires": {
|
||||
"camel-case": "^3.0.0",
|
||||
"css": "^2.2.3",
|
||||
@@ -5150,9 +5344,9 @@
|
||||
}
|
||||
},
|
||||
"@patternfly/react-tokens": {
|
||||
"version": "2.8.4",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-2.8.4.tgz",
|
||||
"integrity": "sha512-GlLyutls0bG39Nwl/sv2FUkicwyRNrXQFso+e7Y4470+VOUtSsVSdQz+rTjgPxQ38olKPsSZdtEjqN9o2PbDiw=="
|
||||
"version": "2.8.12",
|
||||
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-2.8.12.tgz",
|
||||
"integrity": "sha512-QyuMaTizuSn9eESl6bcopGKKgFydocc/N8T7OGB6jARBt6gdIoQWcztdBabSIVz/YGoEDw6lKeoNfed8p6GynA=="
|
||||
},
|
||||
"@sinonjs/commons": {
|
||||
"version": "1.7.1",
|
||||
@@ -5260,6 +5454,15 @@
|
||||
"integrity": "sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/yargs": {
|
||||
"version": "15.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz",
|
||||
"integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/yargs-parser": "*"
|
||||
}
|
||||
},
|
||||
"@types/yargs-parser": {
|
||||
"version": "15.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-15.0.0.tgz",
|
||||
@@ -6210,6 +6413,14 @@
|
||||
"resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz",
|
||||
"integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg=="
|
||||
},
|
||||
"attr-accept": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-1.1.3.tgz",
|
||||
"integrity": "sha512-iT40nudw8zmCweivz6j58g+RT33I4KbaIvRUhjNmDwO2WmsQUxFEZZYZ5w3vXe5x5MX9D7mfvA/XaLOZYFR9EQ==",
|
||||
"requires": {
|
||||
"core-js": "^2.5.0"
|
||||
}
|
||||
},
|
||||
"aws-sign2": {
|
||||
"version": "0.7.0",
|
||||
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
|
||||
@@ -6330,6 +6541,73 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"babel-jest": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-25.1.0.tgz",
|
||||
"integrity": "sha512-tz0VxUhhOE2y+g8R2oFrO/2VtVjA1lkJeavlhExuRBg3LdNJY9gwQ+Vcvqt9+cqy71MCTJhewvTB7Qtnnr9SWg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@jest/transform": "^25.1.0",
|
||||
"@jest/types": "^25.1.0",
|
||||
"@types/babel__core": "^7.1.0",
|
||||
"babel-plugin-istanbul": "^6.0.0",
|
||||
"babel-preset-jest": "^25.1.0",
|
||||
"chalk": "^3.0.0",
|
||||
"slash": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-styles": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
|
||||
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"dev": true
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"babel-loader": {
|
||||
"version": "8.0.6",
|
||||
"resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.0.6.tgz",
|
||||
@@ -6407,6 +6685,15 @@
|
||||
"test-exclude": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"babel-plugin-jest-hoist": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-25.1.0.tgz",
|
||||
"integrity": "sha512-oIsopO41vW4YFZ9yNYoLQATnnN46lp+MZ6H4VvPKFkcc2/fkl3CfE/NZZSmnEIEsJRmJAgkVEK0R7Zbl50CpTw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/babel__traverse": "^7.0.6"
|
||||
}
|
||||
},
|
||||
"babel-plugin-macros": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.4.2.tgz",
|
||||
@@ -6451,6 +6738,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"babel-preset-jest": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-25.1.0.tgz",
|
||||
"integrity": "sha512-eCGn64olaqwUMaugXsTtGAM2I0QTahjEtnRu0ql8Ie+gDWAc1N6wqN0k2NilnyTunM69Pad7gJY7LOtwLimoFQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/plugin-syntax-bigint": "^7.0.0",
|
||||
"@babel/plugin-syntax-object-rest-spread": "^7.0.0",
|
||||
"babel-plugin-jest-hoist": "^25.1.0"
|
||||
}
|
||||
},
|
||||
"babel-runtime": {
|
||||
"version": "6.26.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz",
|
||||
@@ -7898,9 +8196,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"cssom": {
|
||||
"version": "0.3.4",
|
||||
"resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.4.tgz",
|
||||
"integrity": "sha512-+7prCSORpXNeR4/fUP3rL+TzqtiFfhMvTd7uEqMdgPvLPt4+uzFUeufx5RHjGTACCargg/DiEt/moMQmvnfkog=="
|
||||
"version": "0.3.8",
|
||||
"resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz",
|
||||
"integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg=="
|
||||
},
|
||||
"cssstyle": {
|
||||
"version": "0.3.1",
|
||||
@@ -7911,9 +8209,9 @@
|
||||
}
|
||||
},
|
||||
"csstype": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.9.tgz",
|
||||
"integrity": "sha512-xz39Sb4+OaTsULgUERcCk+TJj8ylkL4aSVDQiX/ksxbELSqwkgt4d4RD7fovIdgJGSuNYqwZEiVjYY5l0ask+Q=="
|
||||
"version": "2.6.10",
|
||||
"resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.10.tgz",
|
||||
"integrity": "sha512-D34BqZU4cIlMCY93rZHbrq9pjTAQJ3U8S8rfBqjwHxkGPThWFjzZDQpgMJY0QViLxth6ZKYiwFBo14RdN44U/w=="
|
||||
},
|
||||
"currently-unhandled": {
|
||||
"version": "0.4.1",
|
||||
@@ -9751,11 +10049,6 @@
}
}
},
-"exenv": {
-"version": "1.2.2",
-"resolved": "https://registry.npmjs.org/exenv/-/exenv-1.2.2.tgz",
-"integrity": "sha1-KueOhdmJQVhnCwPUe+wfA72Ru50="
-},
"exit": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz",
@@ -10280,6 +10573,14 @@
"schema-utils": "^1.0.0"
}
},
+"file-selector": {
+"version": "0.1.12",
+"resolved": "https://registry.npmjs.org/file-selector/-/file-selector-0.1.12.tgz",
+"integrity": "sha512-Kx7RTzxyQipHuiqyZGf+Nz4vY9R1XGxuQl/hLoJwq+J4avk/9wxxgZyHKtbyIPJmbD4A66DWGYfyykWNpcYutQ==",
+"requires": {
+"tslib": "^1.9.0"
+}
+},
"fill-range": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz",
@@ -10414,11 +10715,11 @@
}
},
"focus-trap": {
-"version": "3.0.0",
-"resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-3.0.0.tgz",
-"integrity": "sha512-jTFblf0tLWbleGjj2JZsAKbgtZTdL1uC48L8FcmSDl4c2vDoU4NycN1kgV5vJhuq1mxNFkw7uWZ1JAGlINWvyw==",
+"version": "4.0.2",
+"resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-4.0.2.tgz",
+"integrity": "sha512-HtLjfAK7Hp2qbBtLS6wEznID1mPT+48ZnP2nkHzgjpL4kroYHg0CdqJ5cTXk+UO5znAxF5fRUkhdyfgrhh8Lzw==",
"requires": {
-"tabbable": "^3.1.0",
+"tabbable": "^3.1.2",
"xtend": "^4.0.1"
},
"dependencies": {
@@ -10429,14 +10730,6 @@
}
}
},
-"focus-trap-react": {
-"version": "4.0.1",
-"resolved": "https://registry.npmjs.org/focus-trap-react/-/focus-trap-react-4.0.1.tgz",
-"integrity": "sha512-UUZKVEn5cFbF6yUnW7lbXNW0iqN617ShSqYKgxctUvWw1wuylLtyVmC0RmPQNnJ/U+zoKc/djb0tZMs0uN/0QQ==",
-"requires": {
-"focus-trap": "^3.0.0"
-}
-},
"follow-redirects": {
"version": "1.5.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.9.tgz",
@@ -10625,8 +10918,7 @@
"ansi-regex": {
"version": "2.1.1",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"aproba": {
"version": "1.2.0",
@@ -10647,14 +10939,12 @@
"balanced-match": {
"version": "1.0.0",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"brace-expansion": {
"version": "1.1.11",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@@ -10669,20 +10959,17 @@
"code-point-at": {
"version": "1.1.0",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"concat-map": {
"version": "0.0.1",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"console-control-strings": {
"version": "1.1.0",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"core-util-is": {
"version": "1.0.2",
@@ -10799,8 +11086,7 @@
"inherits": {
"version": "2.0.3",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"ini": {
"version": "1.3.5",
@@ -10812,7 +11098,6 @@
"version": "1.0.0",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"number-is-nan": "^1.0.0"
}
@@ -10827,7 +11112,6 @@
"version": "3.0.4",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"brace-expansion": "^1.1.7"
}
@@ -10835,14 +11119,12 @@
"minimist": {
"version": "0.0.8",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"minipass": {
"version": "2.3.5",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"safe-buffer": "^5.1.2",
"yallist": "^3.0.0"
@@ -10861,7 +11143,6 @@
"version": "0.5.1",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"minimist": "0.0.8"
}
@@ -10942,8 +11223,7 @@
"number-is-nan": {
"version": "1.0.1",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"object-assign": {
"version": "4.1.1",
@@ -10955,7 +11235,6 @@
"version": "1.4.0",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"wrappy": "1"
}
@@ -11041,8 +11320,7 @@
"safe-buffer": {
"version": "5.1.2",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"safer-buffer": {
"version": "2.1.2",
@@ -11078,7 +11356,6 @@
"version": "1.0.2",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"code-point-at": "^1.0.0",
"is-fullwidth-code-point": "^1.0.0",
@@ -11098,7 +11375,6 @@
"version": "3.0.1",
"bundled": true,
"dev": true,
-"optional": true,
"requires": {
"ansi-regex": "^2.0.0"
}
@@ -11142,14 +11418,12 @@
"wrappy": {
"version": "1.0.2",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
},
"yallist": {
"version": "3.0.3",
"bundled": true,
-"dev": true,
-"optional": true
+"dev": true
}
}
},
@@ -11629,15 +11903,22 @@
"dev": true
},
"html-tokenize": {
-"version": "2.0.0",
-"resolved": "https://registry.npmjs.org/html-tokenize/-/html-tokenize-2.0.0.tgz",
-"integrity": "sha1-izqaXetHXK5qb5ZxYA0sIKspglE=",
+"version": "2.0.1",
+"resolved": "https://registry.npmjs.org/html-tokenize/-/html-tokenize-2.0.1.tgz",
+"integrity": "sha512-QY6S+hZ0f5m1WT8WffYN+Hg+xm/w5I8XeUcAq/ZYP5wVC8xbKi4Whhru3FtrAebD5EhBW8rmFzkDI6eCAuFe2w==",
"requires": {
"buffer-from": "~0.1.1",
"inherits": "~2.0.1",
-"minimist": "~0.0.8",
+"minimist": "~1.2.5",
"readable-stream": "~1.0.27-1",
"through2": "~0.4.1"
},
+"dependencies": {
+"minimist": {
+"version": "1.2.5",
+"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
+"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
+}
+}
},
"htmlparser2": {
@@ -14622,6 +14903,99 @@
"integrity": "sha512-/jsz0Y+V29w1chdXVygEKSz2nBoHoYqNShPe+QgxSNjAuP1i8+k4LbQNrfoliKej0P45sivkSCh7yiD6ubHS3w==",
"dev": true
},
+"jest-haste-map": {
+"version": "25.1.0",
+"resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-25.1.0.tgz",
+"integrity": "sha512-/2oYINIdnQZAqyWSn1GTku571aAfs8NxzSErGek65Iu5o8JYb+113bZysRMcC/pjE5v9w0Yz+ldbj9NxrFyPyw==",
+"dev": true,
+"requires": {
+"@jest/types": "^25.1.0",
+"anymatch": "^3.0.3",
+"fb-watchman": "^2.0.0",
+"fsevents": "^2.1.2",
+"graceful-fs": "^4.2.3",
+"jest-serializer": "^25.1.0",
+"jest-util": "^25.1.0",
+"jest-worker": "^25.1.0",
+"micromatch": "^4.0.2",
+"sane": "^4.0.3",
+"walker": "^1.0.7"
+},
+"dependencies": {
+"anymatch": {
+"version": "3.1.1",
+"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz",
+"integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==",
+"dev": true,
+"requires": {
+"normalize-path": "^3.0.0",
+"picomatch": "^2.0.4"
+}
+},
+"braces": {
+"version": "3.0.2",
+"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+"dev": true,
+"requires": {
+"fill-range": "^7.0.1"
+}
+},
+"fill-range": {
+"version": "7.0.1",
+"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+"dev": true,
+"requires": {
+"to-regex-range": "^5.0.1"
+}
+},
+"fsevents": {
+"version": "2.1.2",
+"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz",
+"integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==",
+"dev": true,
+"optional": true
+},
+"graceful-fs": {
+"version": "4.2.3",
+"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
+"integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==",
+"dev": true
+},
+"is-number": {
+"version": "7.0.0",
+"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+"dev": true
+},
+"micromatch": {
+"version": "4.0.2",
+"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz",
+"integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==",
+"dev": true,
+"requires": {
+"braces": "^3.0.1",
+"picomatch": "^2.0.5"
+}
+},
+"normalize-path": {
+"version": "3.0.0",
+"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+"dev": true
+},
+"to-regex-range": {
+"version": "5.0.1",
+"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+"dev": true,
+"requires": {
+"is-number": "^7.0.0"
+}
+}
+}
+},
"jest-jasmine2": {
"version": "25.1.0",
"resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-25.1.0.tgz",
@@ -15941,6 +16315,12 @@
}
}
},
+"jest-serializer": {
+"version": "25.1.0",
+"resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-25.1.0.tgz",
+"integrity": "sha512-20Wkq5j7o84kssBwvyuJ7Xhn7hdPeTXndnwIblKDR2/sy1SUm6rWWiG9kSCgJPIfkDScJCIsTtOKdlzfIHOfKA==",
+"dev": true
+},
"jest-snapshot": {
"version": "25.1.0",
"resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-25.1.0.tgz",
@@ -16239,6 +16619,70 @@
}
}
},
+"jest-util": {
+"version": "25.1.0",
+"resolved": "https://registry.npmjs.org/jest-util/-/jest-util-25.1.0.tgz",
+"integrity": "sha512-7did6pLQ++87Qsj26Fs/TIwZMUFBXQ+4XXSodRNy3luch2DnRXsSnmpVtxxQ0Yd6WTipGpbhh2IFP1mq6/fQGw==",
+"dev": true,
+"requires": {
+"@jest/types": "^25.1.0",
+"chalk": "^3.0.0",
+"is-ci": "^2.0.0",
+"mkdirp": "^0.5.1"
+},
+"dependencies": {
+"ansi-styles": {
+"version": "4.2.1",
+"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
+"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
+"dev": true,
+"requires": {
+"@types/color-name": "^1.1.1",
+"color-convert": "^2.0.1"
+}
+},
+"chalk": {
+"version": "3.0.0",
+"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
+"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
+"dev": true,
+"requires": {
+"ansi-styles": "^4.1.0",
+"supports-color": "^7.1.0"
+}
+},
+"color-convert": {
+"version": "2.0.1",
+"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+"dev": true,
+"requires": {
+"color-name": "~1.1.4"
+}
+},
+"color-name": {
+"version": "1.1.4",
+"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+"dev": true
+},
+"has-flag": {
+"version": "4.0.0",
+"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+"dev": true
+},
+"supports-color": {
+"version": "7.1.0",
+"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
+"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
+"dev": true,
+"requires": {
+"has-flag": "^4.0.0"
+}
+}
+}
+},
"jest-validate": {
"version": "23.6.0",
"resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-23.6.0.tgz",
@@ -18927,6 +19371,15 @@
"reflect.ownkeys": "^0.2.0"
}
},
+"prop-types-extra": {
+"version": "1.1.1",
+"resolved": "https://registry.npmjs.org/prop-types-extra/-/prop-types-extra-1.1.1.tgz",
+"integrity": "sha512-59+AHNnHYCdiC+vMwY52WmvP5dM3QLeoumYuEyceQDi9aEhtwN9zIQ2ZNo25sMyXnbh32h+P1ezDsUpUH3JAew==",
+"requires": {
+"react-is": "^16.3.2",
+"warning": "^4.0.0"
+}
+},
"proxy-addr": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz",
@@ -19157,6 +19610,17 @@
}
}
},
+"react-dropzone": {
+"version": "9.0.0",
+"resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-9.0.0.tgz",
+"integrity": "sha512-wZ2o9B2qkdE3RumWhfyZT9swgJYJPeU5qHEcMU8weYpmLex1eeWX0CC32/Y0VutB+BBi2D+iePV/YZIiB4kZGw==",
+"requires": {
+"attr-accept": "^1.1.3",
+"file-selector": "^0.1.8",
+"prop-types": "^15.6.2",
+"prop-types-extra": "^1.1.0"
+}
+},
"react-fast-compare": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz",
@@ -21817,8 +22281,7 @@
"tslib": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz",
-"integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==",
-"dev": true
+"integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ=="
},
"tty-browserify": {
"version": "0.0.0",
@@ -22274,6 +22737,14 @@
"makeerror": "1.0.x"
}
},
+"warning": {
+"version": "4.0.3",
+"resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz",
+"integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==",
+"requires": {
+"loose-envify": "^1.0.0"
+}
+},
"watchpack": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.6.0.tgz",
@@ -58,10 +58,10 @@
},
"dependencies": {
"@lingui/react": "^2.7.2",
-"@patternfly/patternfly": "^2.66.0",
-"@patternfly/react-core": "^3.140.11",
-"@patternfly/react-icons": "^3.15.4",
-"@patternfly/react-tokens": "^2.8.4",
+"@patternfly/patternfly": "^2.71.3",
+"@patternfly/react-core": "^3.153.3",
+"@patternfly/react-icons": "^3.15.15",
+"@patternfly/react-tokens": "^2.8.12",
"ansi-to-html": "^0.6.11",
"axios": "^0.18.1",
"codemirror": "^5.47.0",
@@ -1,5 +1,9 @@
const SchedulesMixin = parent =>
  class extends parent {
+    createSchedule(id, data) {
+      return this.http.post(`${this.baseUrl}${id}/schedules/`, data);
+    }
+
    readSchedules(id, params) {
      return this.http.get(`${this.baseUrl}${id}/schedules/`, { params });
    }
@@ -5,6 +5,28 @@ class CredentialTypes extends Base {
    super(http);
    this.baseUrl = '/api/v2/credential_types/';
  }
+
+  async loadAllTypes(
+    acceptableKinds = ['machine', 'cloud', 'net', 'ssh', 'vault']
+  ) {
+    const pageSize = 200;
+    // The number of credential types a user can have is unlimited. In practice, it is unlikely for
+    // users to have more than a page at the maximum request size.
+    const {
+      data: { next, results },
+    } = await this.read({ page_size: pageSize });
+    let nextResults = [];
+    if (next) {
+      const { data } = await this.read({
+        page_size: pageSize,
+        page: 2,
+      });
+      nextResults = data.results;
+    }
+    return results
+      .concat(nextResults)
+      .filter(type => acceptableKinds.includes(type.kind));
+  }
}

export default CredentialTypes;
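
As the inline comment says, loadAllTypes deliberately stops after two pages of 200. For comparison, a fully exhaustive variant would keep looping on the API's next field until it is empty. The sketch below is illustrative only: the function name and the standalone loop are not part of this commit, and it assumes only the read({ page_size, page }) interface shown above.

// Illustrative sketch, not part of the commit: page through every result.
async function loadAllTypesExhaustive(
  api,
  acceptableKinds = ['machine', 'cloud', 'net', 'ssh', 'vault']
) {
  const pageSize = 200;
  let page = 1;
  let all = [];
  let next;
  do {
    // api.read resolves to { data: { next, results } }, as in the model above.
    const { data } = await api.read({ page_size: pageSize, page });
    all = all.concat(data.results);
    next = data.next;
    page += 1;
  } while (next);
  return all.filter(type => acceptableKinds.includes(type.kind));
}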
65 awx/ui_next/src/api/models/CredentialTypes.test.js Normal file
@@ -0,0 +1,65 @@
import CredentialTypes from './CredentialTypes';

const typesData = [{ id: 1, kind: 'machine' }, { id: 2, kind: 'cloud' }];

describe('CredentialTypesAPI', () => {
  test('should load all types', async () => {
    const getPromise = () =>
      Promise.resolve({
        data: {
          results: typesData,
        },
      });
    const mockHttp = { get: jest.fn(getPromise) };
    const CredentialTypesAPI = new CredentialTypes(mockHttp);

    const types = await CredentialTypesAPI.loadAllTypes();

    expect(mockHttp.get).toHaveBeenCalledTimes(1);
    expect(mockHttp.get.mock.calls[0]).toEqual([
      `/api/v2/credential_types/`,
      { params: { page_size: 200 } },
    ]);
    expect(types).toEqual(typesData);
  });

  test('should load all types (2 pages)', async () => {
    const getPromise = () =>
      Promise.resolve({
        data: {
          results: typesData,
          next: 2,
        },
      });
    const mockHttp = { get: jest.fn(getPromise) };
    const CredentialTypesAPI = new CredentialTypes(mockHttp);

    const types = await CredentialTypesAPI.loadAllTypes();

    expect(mockHttp.get).toHaveBeenCalledTimes(2);
    expect(mockHttp.get.mock.calls[0]).toEqual([
      `/api/v2/credential_types/`,
      { params: { page_size: 200 } },
    ]);
    expect(mockHttp.get.mock.calls[1]).toEqual([
      `/api/v2/credential_types/`,
      { params: { page_size: 200, page: 2 } },
    ]);
    expect(types).toHaveLength(4);
  });

  test('should filter by acceptable kinds', async () => {
    const getPromise = () =>
      Promise.resolve({
        data: {
          results: typesData,
        },
      });
    const mockHttp = { get: jest.fn(getPromise) };
    const CredentialTypesAPI = new CredentialTypes(mockHttp);

    const types = await CredentialTypesAPI.loadAllTypes(['machine']);

    expect(types).toEqual([typesData[0]]);
  });
});
@@ -4,11 +4,36 @@ class Hosts extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = '/api/v2/hosts/';
+
+    this.readFacts = this.readFacts.bind(this);
+    this.readAllGroups = this.readAllGroups.bind(this);
+    this.readGroupsOptions = this.readGroupsOptions.bind(this);
+    this.associateGroup = this.associateGroup.bind(this);
+    this.disassociateGroup = this.disassociateGroup.bind(this);
  }

  readFacts(id) {
    return this.http.get(`${this.baseUrl}${id}/ansible_facts/`);
  }
+
+  readAllGroups(id, params) {
+    return this.http.get(`${this.baseUrl}${id}/all_groups/`, { params });
+  }
+
+  readGroupsOptions(id) {
+    return this.http.options(`${this.baseUrl}${id}/groups/`);
+  }
+
+  associateGroup(id, groupId) {
+    return this.http.post(`${this.baseUrl}${id}/groups/`, { id: groupId });
+  }
+
+  disassociateGroup(id, group) {
+    return this.http.post(`${this.baseUrl}${id}/groups/`, {
+      id: group.id,
+      disassociate: true,
+    });
+  }
}

export default Hosts;
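
The group methods added here follow the AWX related-list convention visible in the code itself: POSTing { id } to the host's /groups/ endpoint associates a group, and POSTing { id, disassociate: true } to the same endpoint removes the association. A minimal usage sketch, assuming an axios-like client and placeholder IDs (none of these identifiers come from the commit):

// Hypothetical usage; the client, import path, and IDs are placeholders.
import axios from 'axios';
import Hosts from './Hosts';

const HostsAPI = new Hosts(axios);

async function example() {
  await HostsAPI.associateGroup(42, 7); // POST /api/v2/hosts/42/groups/ with { id: 7 }
  await HostsAPI.disassociateGroup(42, { id: 7 }); // same URL with { id: 7, disassociate: true }
}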
@@ -16,6 +16,7 @@ class JobTemplates extends SchedulesMixin(
    this.disassociateLabel = this.disassociateLabel.bind(this);
    this.readCredentials = this.readCredentials.bind(this);
    this.readAccessList = this.readAccessList.bind(this);
+    this.readWebhookKey = this.readWebhookKey.bind(this);
  }

  launch(id, data) {
@@ -75,13 +76,21 @@ class JobTemplates extends SchedulesMixin(
    return this.http.get(`${this.baseUrl}${id}/survey_spec/`);
  }

-  updateSurvey(id, survey = null) {
+  updateSurvey(id, survey) {
    return this.http.post(`${this.baseUrl}${id}/survey_spec/`, survey);
  }

  destroySurvey(id) {
    return this.http.delete(`${this.baseUrl}${id}/survey_spec/`);
  }
+
+  readWebhookKey(id) {
+    return this.http.get(`${this.baseUrl}${id}/webhook_key/`);
+  }
+
+  updateWebhookKey(id) {
+    return this.http.post(`${this.baseUrl}${id}/webhook_key/`);
+  }
}

export default JobTemplates;
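
readWebhookKey and updateWebhookKey wrap the template's webhook_key endpoint: the GET returns the configured key, and the body-less POST asks the server to issue a new one. A short sketch under the same assumptions as above (placeholder client, import path, and template ID):

// Hypothetical usage; axios and the template ID are placeholders.
import axios from 'axios';
import JobTemplates from './JobTemplates';

const JobTemplatesAPI = new JobTemplates(axios);

async function rotateWebhookKey(templateId) {
  const { data: current } = await JobTemplatesAPI.readWebhookKey(templateId); // GET ${baseUrl}${id}/webhook_key/
  await JobTemplatesAPI.updateWebhookKey(templateId); // POST to the same URL requests a new key
  return current;
}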
@@ -13,6 +13,10 @@ class Schedules extends Base {
  readCredentials(resourceId, params) {
    return this.http.get(`${this.baseUrl}${resourceId}/credentials/`, params);
  }
+
+  readZoneInfo() {
+    return this.http.get(`${this.baseUrl}zoneinfo/`);
+  }
}

export default Schedules;
Some files were not shown because too many files have changed in this diff.