Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 03:24:50 -03:30)

Compare commits: 12640-Refa... → avoid_reap

521 Commits
Commit SHA1s:

```text
3f83647600 6461ecc762 3e6e0463b9 ededc61a71 3747f5b097 790ccd984c 5d0849d746
7f1750324f a63067da38 7a45048463 97a5e87448 11475590e7 7e88a735ad 2f3e65d4ef
cc18c1220a d2aa1b94e3 a97c1b46c0 6a3282a689 be27d89895 160508c907 5a3900a927
f2bfaf7aca d1cf7245f7 0de7551477 ac99708952 47b7bbeda7 bca0f2dd47 3efc7d5bc4
4b9ca3deee f622d3a1e6 ede1b9af92 2becc5dda9 7aad16964c b1af27c4f6 7cb16ef91d
9358d59f20 9e037f1a02 266ebe5501 ce5270434c 34834252ff 861ba8a727 02e5ba5f94
81ba6c0234 5c47c24e28 752289e175 a24aaba6bc 349785550c ab6511a833 a7b4c03188
a5f9506f49 8e6f4fae80 a952ab0a75 7cca6c4cd9 3945db60eb 252b0dda9f 0a2f1622f6
00817d6b89 06808ef4c4 3aba5b5a04 5c19efdc32 f0c967c1b2 2ca0b7bc01 217dc57c24
1411d11a0e 2fe1ea94bd a47cfc55ab 0eb9de02f3 39ee4285ce 2dcda04a9e 52d46c88e4
c2df22e0f0 90f54b98cd b143df3183 6fa22f5be2 d5de1f9d11 7cca39d069 cf21eab7f4
98b2f51c18 327352feaf ccaace8b30 2902b40084 9669b9dd2f a6a9d3427c d27aada817
2fca07ee4c 335ac636b5 f4bcc03ac7 3051384f95 811ecb8673 5e28f5dca1 d088d36448
89e41597a6 283adc30a8 019e6a52fe 35e5610642 3a303875bb 4499a50019 3fe46e2e27
6d3f39fe92 a3233b5fdd fe3aa6ce2b 77ec46f6cf b5f240ce70 fb2647ff7b 35fbb94aa6
f2ab8d637c 166b586591 d1c608a281 b4803ca894 ce7f597c7e 23a34c5dc9 bef3da6fb2
7f50679e68 52d071f9d1 26a888547d 05af2972bf 60458bebfd 951eee944c 4630757f5f
46ea031566 0d7bbb4389 1dda373aaf 33c1968210 049a158638 32f7295f44 6772fb876b
51112b95bc 6c1d4a5cfd 2e9106d8ea 84822784e8 0f3adb52b1 59da9a29df a949ee048a
b959bc278f 052644eb9d 4e18827909 59ce8c4148 3b9c04bf1e f28203913f 9b2725e5fe
1af955d28c 0815f935ca 6997876da6 93d84fe2c9 f5785976be 61c7d4e4ca a2f528e6e5
058ae132cf 6483575437 a15a23c1d3 ffdcb9f4dd 2d9da11443 5ce6c14f74 61748c072d
89dae3865d 808ab9803e d64b6d4dfe c9d931ceee 8fb831d3de 64865af3bb 9f63c99bee
d7025a919c dab7d91cff 61821faa00 c26d211ee0 f0c91bb1f3 b1dceefac3 bb65945b4f
1b8f6630bf 5157838d83 6a79d19668 ebabea54e1 0eaa7816e9 47176cb31b 5163795cc0
b0a4173545 eb9431ee1f fd6605932a ea9c52aca6 a7ebce1fef 5de9cf748d ebea78943d
bb387f939b bda806fd03 9777ce7fb8 1e33bc4020 d8e7c59fe8 4470b80059 e9ad01e806
8a4059d266 01a7076267 32b6aec66b 884ab424d5 7e55305c45 7f6f57bfee ae92f8292f
51e244e183 ad4e257fdb fcf56950b3 27ea239c00 128a130b84 d75f12c001 2034eac620
e9a1582b70 51ef1e808d 83149519f8 11fbfc2063 f6395c69dd ca07bc85cb b87dd6dc56
f8d46d5e71 ce0a456ecc 5775ff1422 82e8bcd2bb d73cc501d5 7e40a4daed 47e824dd11
4643b816fe 79d9329cfa 6492c03965 98107301a5 4810099158 1aca9929ab 2aa58bc17d
be4b826259 b99a434dee 6cee99a9f9 ee509aea56 b5452a48f8 68e555824d 0c980fa7d5
e34ce8c795 58bad6cfa9 3543644e0e 36c0d07b30 03b0281fde 6f6f04a071 239827a9cf
ac9871b36f f739908ccf cf1ec07eab d968b648de 5dd0eab806 41f3f381ec ac8cff75ce
94b34b801c 8f6849fc22 821b1701bf b7f2825909 e87e041a2a cc336e791c c2a3c3b285
7b8dcc98e7 d5011492bf e363ddf470 987709cdb3 f04ac3c798 71a6baccdb d07076b686
7129f3e8cd df61a5cea1 a4b950f79b 1d87e6e04c 8be739d255 ca54195099 f0fcfdde39
80b1ba4a35 51f8e362dc 737d6d8c8b beaf6b6058 aad1fbcef8 0b96d617ac fe768a159b
c1ebea858b da9b8135e8 76cecf3f6b 7b2938f515 916b5642d2 e524d3df3e 01e9a611ea
5d96ee084d e2cee10767 ef29589940 cec2d2dfb9 15b7ad3570 36ff9cbc6d ed74d80ecb
31c2e1a450 a0b8215c06 f88b993b18 4a7f4d0ed4 6e08c3567f adbcb5c5e4 8054c6aedc
58734a33c4 2832f28014 e5057691ee a0cfd8501c 99b643bd77 305b39d8e5 bb047baeba
9637aad37e fbc06ec623 57430afc55 7aae7e8ed4 a67d107a58 642003e207 ec7e2284df
ff7facdfa2 6df4e62132 6289bfb639 95e4b2064f 48eba60be4 c7efa8b4e0 657b5cb1aa
06daebbecf fb37f22bf4 71f326b705 6508ab4a33 bf871bd427 e403c603d6 4b7b3c7c7d
1cdd2cad67 86856f242a 65c3db8cb8 7fa9dcbc2a 7cfb957de3 d0d467e863 eaccf32aa3
a8fdb22ab3 ae79f94a48 40499a4084 b36fa93005 8839b4e90b 7866135d6c fe48dc412f
3a25c4221f 7e1be3ef94 b2f8ca09ba c7692f5c56 3b24afa7f2 2b3f3e2043 68614b83c0
a1edc75c11 4b0e7a5cde 01c6ac1b14 f0481d0a60 fd2a8b8531 239959a4c9 84f2b91105
9d7b249b20 5bd15dd48d d03348c6e4 5faeff6bec b94a126c02 eedd146643 d30c5ca9cd
a3b21b261c d1d60c9ef1 925e055bb3 9f40d7a05c d34f6af830 163ccfd410 968c316c0c
2fdce43f9e fa305a7bfa 0933a96d60 8b9db837ca 1106367962 721e19e1c8 f9bb26ad33
87363af615 332c433b6e e029cf7196 a1d34462b0 e4283841d6 477a63d1b4 4a30cc244f
271613b86d 1f939aa25e ac57f5cb28 86b0a3d4f1 b269ed48ee fe1b37afaf c39172f516
87dd8c118d d6004fd2d3 3d3e4ad150 81821fd378 9b047c2af6 f0d6bc0dc8 8e5af2b5f2
918db89dc8 7590301ae7 6e25a552d3 0db75fdbfd 83c48bb5fa 1c65339a24 75e6366c5e
af6fec5592 893dba7076 d571b9bbbc b28cc34ff3 776d39f057 61b242d194 22b81f5dd3
99e1920d42 2218fd5c25 3c656842f0 bd7635e74e 0faa999ceb 1bedf32baf 577f102e53
c5cf39abb7 6b315f39de 529a936d0a 6538d34b48 e40824bded ed318ea784 d2b69e05f6
b57ae592ed e22f887765 fc838ba44b b19aa4a88d eba24db74c 153a197fad 8f4c329c2a
368eb46f5b d6fea77082 878035c13b 2cc971a43f 9d77c54612 ef651a3a21 aaf6f5f17e
3303f7bfcf 95dba81a9d 4b308d313a d80db763bc 41fd6ea37f 4808a0053f de41601f27
ddd09461fb 6d192927ae 487efb77ce e655e1dbc2 e41f20320a 192f45bbd0 e013d25e2d
8a6ad47ca5 cba780a8f8 3fc67dc76c 6f85aef5fe 4d9b8400da eeb9d61488 234ce529fc
4f36943b47 25737ba7c6 7127d18072 e5c834383c b9c9800210 c94dc08cf3 a0594c8948
ab5ea46006 6b471e468c 50614b961e a2be320605 8a959e9586 1db189c7ee 39c2fcd8c2
da857ea334 d50c97ae22 0f150aa3b3 cdb51a75b8 22b6ae6903 871175f97f e6497be200
3b9333be9f 04b814cfd8 bb2e5cba0a 42a4e9f10f 882d2fdbe8 0d69d40859 2e38bbcbcd
6f741b909a bbb00e0674 560b952dd6 62c773e912 fd38c926b2 7a8874b947 150c55c72a
417ac3b88c 9e0d1a678c 1a766c09e7 7849c0fb1e 35a7e43f22 47a6a73fc5 805091cfc1
8d05e339ae 8472e3a26d 174121cdbe 385a2eabce a64467c5a6 58772d79c7 235ed2f0d0
03eaeac459 aae57378f0 a4fba37222 3a09522d3e b5db710c8b b964905c80 37717ce3d5
e7c75f3510 cfce31419d 8e83f9b134 d3eb2c1975 5551874352 80a0842df1 2dd2931ab2
e83a4d7234 8e2003a36b 4f52343cd9
```
.github/triage_replies.md (vendored, 17 changed lines)

```diff
@@ -53,6 +53,16 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!


+### Red Hat Support Team
+- Hi! \
+\
+It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
+\
+Here is the link to do so: \
+\
+https://access.redhat.com/support \
+\
+Thank you for your submission and for supporting AWX!


 ## Common
@@ -96,6 +106,13 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 ### Oracle AWX
 We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support.

+### Community Resolved
+Hi,
+
+We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise.
+
+<name of community member who helped> thanks so much for taking the time to write a thoughtful and helpful response to this issue!
+
 ### AWX Release
 Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb

```
.github/workflows/ci.yml (vendored, 82 changed lines)

```diff
@@ -1,7 +1,10 @@
 ---
 name: CI
 env:
   BRANCH: ${{ github.base_ref || 'devel' }}
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
+  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
 jobs:
@@ -17,85 +20,33 @@ jobs:
       tests:
         - name: api-test
           command: /start_tests.sh
           label: Run API Tests
         - name: api-lint
           command: /var/lib/awx/venv/awx/bin/tox -e linters
           label: Run API Linters
         - name: api-swagger
           command: /start_tests.sh swagger
           label: Generate API Reference
         - name: awx-collection
           command: /start_tests.sh test_collection_all
           label: Run Collection Tests
         - name: api-schema
           label: Check API Schema
           command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
         - name: ui-lint
           label: Run UI Linters
           command: make ui-lint
         - name: ui-test-screens
           label: Run UI Screens Tests
           command: make ui-test-screens
         - name: ui-test-general
           label: Run UI General Tests
           command: make ui-test-general
     steps:
       - uses: actions/checkout@v2

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
       - name: Run check ${{ matrix.tests.name }}
         run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make github_ci_runner

       - name: Install python ${{ env.py_version }}
         uses: actions/setup-python@v2
         with:
           python-version: ${{ env.py_version }}

       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

       - name: Pre-pull image to warm build cache
         run: |
           docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :

       - name: Build image
         run: |
           DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build

       - name: ${{ matrix.texts.label }}
         run: |
           docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
             --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
   dev-env:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

       - name: Install python ${{ env.py_version }}
         uses: actions/setup-python@v2
         with:
           python-version: ${{ env.py_version }}

       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

       - name: Pre-pull image to warm build cache
         run: |
           docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :

       - name: Build image
         run: |
           DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build

       - name: Run smoke test
         run: |
           export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }}
           export COMPOSE_TAG=${{ env.BRANCH }}
           ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v
         run: make github_ci_setup && ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v

   awx-operator:
     runs-on: ubuntu-latest
@@ -144,3 +95,22 @@ jobs:
         env:
           AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
+
+  collection-sanity:
+    name: awx_collection sanity
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v2
+
+      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
+      - name: Upgrade ansible-core
+        run: python3 -m pip install --upgrade ansible-core
+
+      - name: Run sanity tests
+        run: make test_collection_sanity
+        env:
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1
```
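The CI matrix above now funnels every check through the new `github_ci_runner` make target instead of repeating the login/pull/build steps per job. A minimal sketch of reproducing one matrix entry locally, assuming docker is installed and that `CI_GITHUB_TOKEN` holds a token able to read ghcr.io (the variable names come from the workflow and Makefile; the token value is a placeholder):

```bash
# Sketch: run the api-lint matrix entry locally the way ci.yml does.
export CI_GITHUB_TOKEN='<ghcr-readable-token>'   # placeholder; github_ci_setup pipes this to docker login
export GITHUB_ACTOR='<your-github-username>'     # github_ci_setup uses this as the login user

# github_ci_runner = github_ci_setup (login, pre-pull, build) + docker-runner (run AWX_DOCKER_CMD)
AWX_DOCKER_CMD='/var/lib/awx/venv/awx/bin/tox -e linters' make github_ci_runner
```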
.github/workflows/devel_images.yml (vendored, 24 changed lines)

```diff
@@ -1,10 +1,13 @@
 ---
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
   push:
     if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
@@ -18,6 +21,12 @@ jobs:
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

+      - name: Set lower case owner name
+        run: |
+          echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+        env:
+          OWNER: '${{ github.repository_owner }}'
+
       - name: Install python ${{ env.py_version }}
         uses: actions/setup-python@v2
         with:
@@ -29,15 +38,18 @@ jobs:
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :

       - name: Build images
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build

       - name: Push image
         run: |
-          docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
```
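The "Set lower case owner name" step relies on bash's case-conversion parameter expansion, since image names on ghcr.io must be lowercase while `github.repository_owner` may contain capitals. A standalone sketch of the same expansion (the example owner name is made up):

```bash
# Bash 4+ parameter expansion: ${var,,} lowercases the entire value.
OWNER='MyOrgName'
OWNER_LC="${OWNER,,}"
echo "$OWNER_LC"   # prints: myorgname

# In the workflow the result is appended to $GITHUB_ENV so later steps can use $OWNER_LC:
# echo "OWNER_LC=${OWNER,,}" >> "$GITHUB_ENV"
```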
.github/workflows/e2e_test.yml (vendored, 7 changed lines)

```diff
@@ -1,9 +1,12 @@
 ---
 name: E2E Tests
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   pull_request_target:
     types: [labeled]
 jobs:
   e2e-test:
     if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
     runs-on: ubuntu-latest
@@ -104,5 +107,3 @@
         with:
           name: AWX-logs-${{ matrix.job }}
           path: make-docker-compose-output.log
-
-
```
.github/workflows/feature_branch_deletion.yml (vendored, new file, 26 lines)

```diff
@@ -0,0 +1,26 @@
+---
+name: Feature branch deletion cleanup
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+on:
+  delete:
+    branches:
+      - feature_**
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Delete API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
```
.github/workflows/pr_body_check.yml (vendored, 18 changed lines)

```diff
@@ -13,21 +13,13 @@ jobs:
       packages: write
       contents: read
     steps:
-      - name: Write PR body to a file
-        run: |
-          cat >> pr.body << __SOME_RANDOM_PR_EOF__
-          ${{ github.event.pull_request.body }}
-          __SOME_RANDOM_PR_EOF__
-
-      - name: Display the received body for troubleshooting
-        run: cat pr.body
-
       # We want to write these out individually just incase the options were joined on a single line
       - name: Check for each of the lines
+        env:
+          PR_BODY: ${{ github.event.pull_request.body }}
         run: |
-          grep "Bug, Docs Fix or other nominal change" pr.body > Z
-          grep "New or Enhanced Feature" pr.body > Y
-          grep "Breaking Change" pr.body > X
+          echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z
+          echo "$PR_BODY" | grep "New or Enhanced Feature" > Y
+          echo "$PR_BODY" | grep "Breaking Change" > X
           exit 0
       # We exit 0 and set the shell to prevent the returns from the greps from failing this step
       # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
```
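Passing the PR body through an `env:` entry and reading `"$PR_BODY"` keeps untrusted text out of the script source, which is what the removed heredoc approach interpolated directly. A small sketch of why that matters, using a hypothetical body value:

```bash
# Hypothetical PR body chosen to show why inline interpolation is fragile: a body
# containing the heredoc terminator would have truncated the old script's file write.
PR_BODY='Breaking Change
__SOME_RANDOM_PR_EOF__'

# Safe pattern (what the workflow now does): the body is only ever data, never script text.
echo "$PR_BODY" | grep "Breaking Change" > X
echo "exit status: $?"   # 0 when the checklist line is present
```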
.github/workflows/promote.yml (vendored, 20 changed lines)

```diff
@@ -1,11 +1,16 @@
 ---
 name: Promote Release

+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   release:
     types: [published]

 jobs:
   promote:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     steps:
       - name: Checkout awx
@@ -34,9 +39,13 @@ jobs:
       - name: Build collection and publish to galaxy
         run: |
           COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
-          ansible-galaxy collection publish \
-            --token=${{ secrets.GALAXY_TOKEN }} \
-            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
+          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
+            ansible-galaxy collection publish \
+              --token=${{ secrets.GALAXY_TOKEN }} \
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+          fi

       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -48,6 +57,7 @@ jobs:
       - name: Build awxkit and upload to pypi
         run: |
+          git reset --hard
           cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
@@ -70,4 +80,6 @@ jobs:
       docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
       docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
       docker push quay.io/${{ github.repository }}:latest

       docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
       docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
       docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
```
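The new guard makes the Galaxy publish step re-runnable: a HEAD request against the download URL answers with an HTTP redirect when the artifact already exists, so a repeat promote skips the publish instead of failing on a duplicate upload. A standalone sketch of the same check, with a placeholder artifact URL:

```bash
# Sketch of the check-before-publish pattern used above (URL is a placeholder, not a real release).
url="https://galaxy.ansible.com/download/awx-awx-0.0.0.tar.gz"

# curl --head -sw '%{http_code}' prints the response headers followed by the status
# code; tail -1 keeps just the code.
code="$(curl --head -sw '%{http_code}' "$url" | tail -1)"

if [ "$code" == "302" ]; then
    echo "Galaxy release already done"
else
    echo "not published yet; would run: ansible-galaxy collection publish ..."
fi
```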
.github/workflows/stage.yml (vendored, 20 changed lines)

```diff
@@ -1,5 +1,9 @@
 ---
 name: Stage Release

+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   workflow_dispatch:
     inputs:
@@ -17,6 +21,7 @@ on:

 jobs:
   stage:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     permissions:
       packages: write
@@ -80,6 +85,20 @@ jobs:
           -e push=yes \
           -e awx_official=yes

+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+
+      - name: tag awx-ee:latest with version input
+        run: |
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+
       - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
@@ -99,6 +118,7 @@ jobs:
         env:
           AWX_TEST_IMAGE: ${{ github.repository }}
           AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

       - name: Create draft release for AWX
         working-directory: awx
```
.github/workflows/upload_schema.yml (vendored, 5 changed lines)

```diff
@@ -1,10 +1,15 @@
 ---
 name: Upload API Schema

+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   push:
     branches:
       - devel
       - release_**
+      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest
```
```diff
@@ -12,7 +12,7 @@ recursive-include awx/plugins *.ps1
 recursive-include requirements *.txt
 recursive-include requirements *.yml
 recursive-include config *
-recursive-include docs/licenses *
+recursive-include licenses *
 recursive-exclude awx devonly.py*
 recursive-exclude awx/api/tests *
 recursive-exclude awx/main/tests *
```
Makefile (122 changed lines)

```diff
@@ -1,4 +1,5 @@
 PYTHON ?= python3.9
+DOCKER_COMPOSE ?= docker-compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -6,7 +7,20 @@ CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
-COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+
+# ansible-test requires semver compatable version, so we allow overrides to hack it
+COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+# args for the ansible-test sanity command
+COLLECTION_SANITY_ARGS ?= --docker
+# collection unit testing directories
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+# collection integration test directories (defaults to all)
+COLLECTION_TEST_TARGET ?=
+# args for collection install
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false

 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -34,7 +48,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 setuptools_scm[toml]==6.4.2 wheel==0.36.2
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4

 NAME ?= awx
@@ -52,7 +66,7 @@ I18N_FLAG_FILE = .i18n_built
 	sdist \
 	ui-release ui-devel \
 	VERSION PYTHON_VERSION docker-compose-sources \
-	.git/hooks/pre-commit
+	.git/hooks/pre-commit github_ci_setup github_ci_runner

 clean-tmp:
 	rm -rf tmp/
@@ -85,6 +99,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist

 clean-api:
 	rm -rf build $(NAME)-$(VERSION) *.egg-info
+	rm -rf .tox
 	find . -type f -regex ".*\.py[co]$$" -delete
 	find . -type d -name "__pycache__" -delete
 	rm -f awx/awx_test.sqlite3*
@@ -117,7 +132,7 @@ virtualenv_awx:
 	fi; \
 	fi

 ## Install third-party requirements needed for AWX's environment.
 # this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
 	if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
@@ -181,7 +196,7 @@ collectstatic:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

 DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
@@ -189,19 +204,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 \
-		--socket 127.0.0.1:8050 \
-		--module=awx.wsgi:application \
-		--home=/var/lib/awx/venv/awx \
-		--chdir=/awx_devel/ \
-		--vacuum \
-		--processes=5 \
-		--harakiri=120 --master \
-		--no-orphans \
-		--max-requests=1000 \
-		--stats /tmp/stats.socket \
-		--lazy-apps \
-		--logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
+	uwsgi /etc/tower/uwsgi.ini

 awx-autoreload:
 	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
@@ -287,19 +290,28 @@ test:
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'

-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-COLLECTION_TEST_TARGET ?=
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
+## Login to Github container image registry, pull image, then build image.
+github_ci_setup:
+	# GITHUB_ACTOR is automatic github actions env var
+	# CI_GITHUB_TOKEN is defined in .github files
+	echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
+	docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
+	make docker-compose-build
+
+## Runs AWX_DOCKER_CMD inside a new docker container.
+docker-runner:
+	docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
+
+## Builds image and runs AWX_DOCKER_CMD in it, mainly for .github checks.
+github_ci_runner: github_ci_setup docker-runner

 test_collection:
 	rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi && \
-	pip install ansible-core && \
+	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+	ansible --version
 	py.test $(COLLECTION_TEST_DIRS) -v
 	# The python path needs to be modified so that the tests can find Ansible within the container
 	# First we will use anything expility set as PYTHONPATH
@@ -329,8 +341,13 @@ install_collection: build_collection
 	rm -rf $(COLLECTION_INSTALL)
 	ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz

-test_collection_sanity: install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test sanity
+test_collection_sanity:
+	rm -rf awx_collection_build/
+	rm -rf $(COLLECTION_INSTALL)
+	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
+	ansible --version
+	COLLECTION_VERSION=1.0.0 make install_collection
+	cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)

 test_collection_integration: install_collection
 	cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
@@ -377,6 +394,8 @@ clean-ui:
 	rm -rf awx/ui/build
 	rm -rf awx/ui/src/locales/_build
 	rm -rf $(UI_BUILD_FLAG_FILE)
+	# the collectstatic command doesn't like it if this dir doesn't exist.
+	mkdir -p awx/ui/build/static

 awx/ui/node_modules:
 	NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
@@ -386,20 +405,20 @@ $(UI_BUILD_FLAG_FILE):
 	$(PYTHON) tools/scripts/compilemessages.py
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run build
-	mkdir -p awx/public/static/css
-	mkdir -p awx/public/static/js
-	mkdir -p awx/public/static/media
-	cp -r awx/ui/build/static/css/* awx/public/static/css
-	cp -r awx/ui/build/static/js/* awx/public/static/js
-	cp -r awx/ui/build/static/media/* awx/public/static/media
 	touch $@

 ui-release: $(UI_BUILD_FLAG_FILE)

 ui-devel: awx/ui/node_modules
 	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
+	@if [ -d "/var/lib/awx" ] ; then \
+		mkdir -p /var/lib/awx/public/static/css; \
+		mkdir -p /var/lib/awx/public/static/js; \
+		mkdir -p /var/lib/awx/public/static/media; \
+		cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
+		cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
+		cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
+	fi

 ui-devel-instrumented: awx/ui/node_modules
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -451,8 +470,9 @@ awx/projects:
 COMPOSE_UP_OPTS ?=
 COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
-EXECUTION_NODE_COUNT ?= 2
+EXECUTION_NODE_COUNT ?= 0
 MINIKUBE_CONTAINER_GROUP ?= false
 MINIKUBE_SETUP ?= false # if false, run minikube separately
 EXTRA_SOURCES_ANSIBLE_OPTS ?=

 ifneq ($(ADMIN_PASSWORD),)
@@ -461,7 +481,7 @@ endif

 docker-compose-sources: .git/hooks/pre-commit
 	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-	    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+	    ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 	fi;

 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
@@ -480,20 +500,20 @@ docker-compose-sources: .git/hooks/pre-commit

 docker-compose: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

 docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans

 docker-compose-test: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash

 docker-compose-runtest: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

 docker-compose-build-swagger: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

 SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
@@ -502,7 +522,7 @@ detect-schema-change: genschema
 	diff -u -b reference-schema.json schema.json

 docker-compose-clean: awx/projects
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf

 docker-compose-container-group-clean:
 	@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
@@ -518,10 +538,8 @@ docker-compose-build:
 	--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .

 docker-clean:
-	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
-	if [ "$(shell docker images | grep awx_devel)" ]; then \
-		docker images | grep awx_devel | awk '{print $$3}' | xargs docker rmi --force; \
-	fi
+	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
+	-$(foreach image_id,$(shell docker images --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)

 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
 	docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
@@ -530,10 +548,10 @@ docker-refresh: docker-clean docker-compose

 ## Docker Development Environment with Elastic Stack Connected
 docker-compose-elk: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

 docker-compose-cluster-elk: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

 docker-compose-container-group:
 	MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -555,6 +573,7 @@ VERSION:

 PYTHON_VERSION:
 	@echo "$(PYTHON)" | sed 's:python::'

+.PHONY: Dockerfile
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 	ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)
@@ -591,13 +610,12 @@ pot: $(UI_BUILD_FLAG_FILE)

 po: $(UI_BUILD_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean

-LANG = "en_us"
 ## generate API django .pot .po
 messages:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
+	$(PYTHON) manage.py makemessages -l en_us --keep-pot

 print-%:
 	@echo $($*)
@@ -635,4 +653,4 @@ help/generate:
 	} \
 	} \
 	{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
 	@printf "\n"
```
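Several of the Makefile changes above exist so the development environment can be tuned from the command line: `DOCKER_COMPOSE`, `EXECUTION_NODE_COUNT`, `MINIKUBE_SETUP`, and `COLLECTION_SANITY_ARGS` are all `?=` assignments, so an environment or command-line value overrides the default. A sketch of plausible invocations (the values are illustrative):

```bash
# Bring up the dev environment with the Compose v2 CLI and two execution nodes
# (EXECUTION_NODE_COUNT now defaults to 0, so opt back in explicitly).
DOCKER_COMPOSE='docker compose' EXECUTION_NODE_COUNT=2 make docker-compose

# Run collection sanity tests without the new --docker default, e.g. when already inside a container.
make test_collection_sanity COLLECTION_SANITY_ARGS=''

# The rewritten docker-clean recipe lines are prefixed with '-', which tells make to
# keep going past a failing command, much like the '|| :' idiom used elsewhere in this file.
make docker-clean
```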
```diff
@@ -67,7 +67,6 @@ else:
     from django.db import connection

 if HAS_DJANGO is True:

     # See upgrade blocker note in requirements/README.md
     try:
         names_digest('foo', 'bar', 'baz', length=8)
```
```diff
@@ -1,5 +1,4 @@
 # Django
-from django.conf import settings
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
@@ -9,6 +8,7 @@ from rest_framework import serializers
 from awx.conf import fields, register, register_validate
 from awx.api.fields import OAuth2ProviderField
 from oauth2_provider.settings import oauth2_settings
+from awx.sso.common import is_remote_auth_enabled


 register(
@@ -96,22 +96,20 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
+register(
+    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
+    field_class=fields.BooleanField,
+    default=False,
+    label=_('Allow anonymous users to poll metrics'),
+    help_text=_('If true, anonymous users are allowed to poll metrics.'),
+    category=_('Authentication'),
+    category_slug='authentication',
+)


 def authentication_validate(serializer, attrs):
-    remote_auth_settings = [
-        'AUTH_LDAP_SERVER_URI',
-        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
-        'SOCIAL_AUTH_GITHUB_KEY',
-        'SOCIAL_AUTH_GITHUB_ORG_KEY',
-        'SOCIAL_AUTH_GITHUB_TEAM_KEY',
-        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
-        'RADIUS_SERVER',
-        'TACACSPLUS_HOST',
-    ]
-    if attrs.get('DISABLE_LOCAL_AUTH', False):
-        if not any(getattr(settings, s, None) for s in remote_auth_settings):
-            raise serializers.ValidationError(_("There are no remote authentication systems configured."))
+    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
+        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
     return attrs
```
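The new `ALLOW_METRICS_FOR_ANONYMOUS_USERS` setting defaults to off, so unauthenticated scrapes of the metrics endpoint stay rejected unless an operator opts in. A hedged sketch of checking the behavior against a local dev server (the URL, port, and credentials are assumptions for a docker-compose dev setup):

```bash
# With ALLOW_METRICS_FOR_ANONYMOUS_USERS=False (the default), an anonymous request
# should be rejected; with it set to True, the same request returns metrics text.
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8013/api/v2/metrics/

# Authenticated polling works either way (placeholder credentials):
curl -s -u admin:password http://localhost:8013/api/v2/metrics/ | head
```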
```diff
@@ -80,7 +80,6 @@ class VerbatimField(serializers.Field):


 class OAuth2ProviderField(fields.DictField):
-
     default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
     valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
     child = fields.IntegerField(min_value=1)
```
```diff
@@ -155,12 +155,11 @@ class FieldLookupBackend(BaseFilterBackend):
         'search',
     )

-    # A list of fields that we know can be filtered on without the possiblity
+    # A list of fields that we know can be filtered on without the possibility
     # of introducing duplicates
     NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

     def get_fields_from_lookup(self, model, lookup):
-
         if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
             path, suffix = lookup.rsplit('__', 1)
         else:
@@ -269,7 +268,7 @@ class FieldLookupBackend(BaseFilterBackend):
                 continue

             # HACK: make `created` available via API for the Django User ORM model
-            # so it keep compatiblity with other objects which exposes the `created` attr.
+            # so it keep compatibility with other objects which exposes the `created` attr.
             if queryset.model._meta.object_name == 'User' and key.startswith('created'):
                 key = key.replace('created', 'date_joined')
```
```diff
@@ -6,7 +6,6 @@ import inspect
 import logging
 import time
 import uuid
-import urllib.parse

 # Django
 from django.conf import settings
@@ -14,7 +13,7 @@ from django.contrib.auth import views as auth_views
 from django.contrib.contenttypes.models import ContentType
 from django.core.cache import cache
 from django.core.exceptions import FieldDoesNotExist
-from django.db import connection
+from django.db import connection, transaction
 from django.db.models.fields.related import OneToOneRel
 from django.http import QueryDict
 from django.shortcuts import get_object_or_404
@@ -29,8 +28,8 @@ from rest_framework import generics
 from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
-from rest_framework.permissions import AllowAny
-from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

 # AWX
@@ -41,7 +40,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
-from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 from awx.conf import settings_registry
@@ -65,6 +64,7 @@ __all__ = [
     'ParentMixin',
     'SubListAttachDetachAPIView',
     'CopyAPIView',
+    'GenericCancelView',
     'BaseUsersList',
 ]
@@ -90,13 +90,9 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
-        current_user = getattr(request, 'user', None)
         if request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
             ret.set_cookie('userLoggedIn', 'true')
-            current_user = UserSerializer(self.request.user)
-            current_user = smart_str(JSONRenderer().render(current_user.data))
-            current_user = urllib.parse.quote('%s' % current_user, '')
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

         return ret
@@ -139,7 +135,6 @@ def get_default_schema():


 class APIView(views.APIView):
-
     schema = get_default_schema()
     versioning_class = URLPathVersioning
@@ -253,7 +248,7 @@ class APIView(views.APIView):
         response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

         if getattr(self, 'deprecated', False):
-            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
+            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'

         return response
@@ -679,7 +674,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
             location = None
             created = True

-        # Retrive the sub object (whether created or by ID).
+        # Retrieve the sub object (whether created or by ID).
         sub = get_object_or_400(self.model, pk=sub_id)

         # Verify we have permission to attach.
@@ -804,7 +799,6 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):


 class ResourceAccessList(ParentMixin, ListAPIView):
-
     serializer_class = ResourceAccessListElementSerializer
     ordering = ('username',)
@@ -827,9 +821,8 @@ def trigger_delayed_deep_copy(*args, **kwargs):


 class CopyAPIView(GenericAPIView):
-
     serializer_class = CopySerializer
-    permission_classes = (AllowAny,)
+    permission_classes = (IsAuthenticated,)
     copy_return_serializer_class = None
     new_in_330 = True
     new_in_api_v2 = True
@@ -990,6 +983,23 @@ class CopyAPIView(GenericAPIView):
         return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)


+class GenericCancelView(RetrieveAPIView):
+    # In subclass set model, serializer_class
+    obj_permission_type = 'cancel'
+
+    @transaction.non_atomic_requests
+    def dispatch(self, *args, **kwargs):
+        return super(GenericCancelView, self).dispatch(*args, **kwargs)
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        if obj.can_cancel:
+            obj.cancel()
+            return Response(status=status.HTTP_202_ACCEPTED)
+        else:
+            return self.http_method_not_allowed(request, *args, **kwargs)
+
+
 class BaseUsersList(SubListCreateAttachDetachAPIView):
     def post(self, request, *args, **kwargs):
         ret = super(BaseUsersList, self).post(request, *args, **kwargs)
```
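`GenericCancelView` gives cancel endpoints a shared implementation: GET retrieves the object (it subclasses `RetrieveAPIView`) and POST cancels it when `can_cancel` is true, otherwise the view answers 405. A hedged sketch of exercising such an endpoint over HTTP (the job-cancel URL follows AWX's usual v2 URL conventions but is an assumption here, as are the credentials):

```bash
# POST to a cancel endpoint; 202 means the cancel was accepted,
# 405 means the object was not in a cancellable state.
curl -s -o /dev/null -w '%{http_code}\n' \
    -X POST -u admin:password \
    http://localhost:8013/api/v2/jobs/42/cancel/
```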
```diff
@@ -128,7 +128,7 @@ class Metadata(metadata.SimpleMetadata):
         # Special handling of notification configuration where the required properties
         # are conditional on the type selected.
         if field.field_name == 'notification_configuration':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.init_parameters

         # Special handling of notification messages where the required properties
@@ -138,7 +138,7 @@ class Metadata(metadata.SimpleMetadata):
         except (AttributeError, KeyError):
             view_model = None
         if view_model == NotificationTemplate and field.field_name == 'messages':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.default_messages

         # Update type of fields returned...
```
```diff
@@ -24,7 +24,6 @@ class DisabledPaginator(DjangoPaginator):


 class Pagination(pagination.PageNumberPagination):
-
     page_size_query_param = 'page_size'
     max_page_size = settings.MAX_PAGE_SIZE
     count_disabled = False
```
```diff
@@ -24,7 +24,6 @@ __all__ = [
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
     'IsSystemAdminOrAuditor',
-    'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]
```
```diff
@@ -22,7 +22,6 @@ class SurrogateEncoder(encoders.JSONEncoder):


 class DefaultJSONRenderer(renderers.JSONRenderer):
-
     encoder_class = SurrogateEncoder

@@ -61,7 +60,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
             delattr(renderer_context['view'], '_request')

     def get_raw_data_form(self, data, view, method, request):
-        # Set a flag on the view to indiciate to the view/serializer that we're
+        # Set a flag on the view to indicate to the view/serializer that we're
         # creating a raw data form for the browsable API. Store the original
         # request method to determine how to populate the raw data form.
         if request.method in {'OPTIONS', 'DELETE'}:
@@ -95,7 +94,6 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):


 class PlainTextRenderer(renderers.BaseRenderer):
-
     media_type = 'text/plain'
     format = 'txt'

@@ -106,18 +104,15 @@ class PlainTextRenderer(renderers.BaseRenderer):


 class DownloadTextRenderer(PlainTextRenderer):
-
     format = "txt_download"


 class AnsiTextRenderer(PlainTextRenderer):
-
     media_type = 'text/plain'
     format = 'ansi'


 class AnsiDownloadRenderer(PlainTextRenderer):
-
     format = "ansi_download"
```
@@ -8,6 +8,7 @@ import logging
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
from datetime import timedelta
|
||||
from uuid import uuid4
|
||||
|
||||
# OAuth2
|
||||
from oauthlib import oauth2
|
||||
@@ -29,6 +30,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.encoding import force_str
|
||||
from django.utils.text import capfirst
|
||||
from django.utils.timezone import now
|
||||
from django.core.validators import RegexValidator, MaxLengthValidator
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.exceptions import ValidationError, PermissionDenied
|
||||
@@ -107,19 +109,26 @@ from awx.main.utils import (
|
||||
extract_ansible_vars,
|
||||
encrypt_dict,
|
||||
prefetch_page_capabilities,
|
||||
get_external_account,
|
||||
truncate_stdout,
|
||||
get_licenser,
|
||||
)
|
||||
from awx.main.utils.filters import SmartFilter
|
||||
from awx.main.utils.named_url_graph import reset_counters
|
||||
from awx.main.scheduler.task_manager_models import TaskManagerInstanceGroups, TaskManagerInstances
|
||||
from awx.main.scheduler.task_manager_models import TaskManagerModels
|
||||
from awx.main.redact import UriCleaner, REPLACE_STR
|
||||
from awx.main.signals import update_inventory_computed_fields
|
||||
|
||||
|
||||
from awx.main.validators import vars_validate_or_raise
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField
|
||||
|
||||
# AWX Utils
|
||||
from awx.api.validators import HostnameRegexValidator
|
||||
|
||||
from awx.sso.common import get_external_account
|
||||
|
||||
logger = logging.getLogger('awx.api.serializers')
|
||||
|
||||
# Fields that should be summarized regardless of object type.
|
||||
@@ -151,7 +160,7 @@ SUMMARIZABLE_FK_FIELDS = {
    'default_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
    'execution_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
    'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type', 'allow_override'),
-    'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
+    'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type', 'allow_override'),
    'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed'),
    'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
    'signature_validation_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'credential_type_id'),
@@ -196,7 +205,6 @@ def reverse_gfk(content_object, request):


class CopySerializer(serializers.Serializer):

    name = serializers.CharField()

    def validate(self, attrs):
@@ -428,7 +436,6 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
                continue
            summary_fields[fk] = OrderedDict()
            for field in related_fields:

                fval = getattr(fkval, field, None)

                if fval is None and field == 'type':
@@ -534,7 +541,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
        #
        # This logic is to force rendering choice's on an uneditable field.
        # Note: Consider expanding this rendering for more than just choices fields
-        # Note: This logic works in conjuction with
+        # Note: This logic works in conjunction with
        if hasattr(model_field, 'choices') and model_field.choices:
            was_editable = model_field.editable
            model_field.editable = True
@@ -926,7 +933,6 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):


class UnifiedJobStdoutSerializer(UnifiedJobSerializer):

    result_stdout = serializers.SerializerMethodField()

    class Meta:
@@ -940,7 +946,6 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):


class UserSerializer(BaseSerializer):

    password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
    ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
    external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
@@ -987,23 +992,8 @@ class UserSerializer(BaseSerializer):
    def _update_password(self, obj, new_password):
-        # For now we're not raising an error, just not saving password for
-        # users managed by LDAP who already have an unusable password set.
-        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
-            try:
-                if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
-                    new_password = None
-            except AttributeError:
-                pass
-        if (
-            getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
-        ) and obj.social_auth.all():
-            new_password = None
-        if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
-            new_password = None
-        if new_password:
+        # get_external_account will return something like ldap or enterprise, or None if the user isn't external. We only want to allow a password update for the None case.
+        if new_password and not self.get_external_account(obj):
            obj.set_password(new_password)
            obj.save(update_fields=['password'])

@@ -1100,7 +1090,6 @@ class UserActivityStreamSerializer(UserSerializer):


class BaseOAuth2TokenSerializer(BaseSerializer):

    refresh_token = serializers.SerializerMethodField()
    token = serializers.SerializerMethodField()
    ALLOWED_SCOPES = ['read', 'write']
@@ -1218,7 +1207,6 @@ class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):


class OAuth2ApplicationSerializer(BaseSerializer):

    show_capabilities = ['edit', 'delete']

    class Meta:
@@ -1453,7 +1441,6 @@ class ExecutionEnvironmentSerializer(BaseSerializer):


class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):

    status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True)
    last_update_failed = serializers.BooleanField(read_only=True)
    last_updated = serializers.DateTimeField(read_only=True)
@@ -1544,7 +1531,6 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):


class ProjectPlaybooksSerializer(ProjectSerializer):

    playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.'))

    class Meta:
@@ -1562,7 +1548,6 @@ class ProjectPlaybooksSerializer(ProjectSerializer):


class ProjectInventoriesSerializer(ProjectSerializer):

    inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))

    class Meta:
@@ -1577,7 +1562,6 @@ class ProjectInventoriesSerializer(ProjectSerializer):


class ProjectUpdateViewSerializer(ProjectSerializer):

    can_update = serializers.BooleanField(read_only=True)

    class Meta:
@@ -1607,7 +1591,6 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):


class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):

    playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))

    class Meta:
@@ -1630,7 +1613,6 @@ class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSeriali


class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -1875,7 +1857,7 @@ class HostSerializer(BaseSerializerWithVariables):
            vars_dict = parse_yaml_or_json(variables)
            vars_dict['ansible_ssh_port'] = port
            attrs['variables'] = json.dumps(vars_dict)
-        if Group.objects.filter(name=name, inventory=inventory).exists():
+        if inventory and Group.objects.filter(name=name, inventory=inventory).exists():
            raise serializers.ValidationError(_('A Group with that name already exists.'))

        return super(HostSerializer, self).validate(attrs)
@@ -1967,8 +1949,131 @@ class GroupSerializer(BaseSerializerWithVariables):
        return ret


class BulkHostSerializer(HostSerializer):
    class Meta:
        model = Host
        fields = (
            'name',
            'enabled',
            'instance_id',
            'description',
            'variables',
        )


class BulkHostCreateSerializer(serializers.Serializer):
    inventory = serializers.PrimaryKeyRelatedField(
        queryset=Inventory.objects.all(), required=True, write_only=True, help_text=_('Primary Key ID of inventory to add hosts to.')
    )
    hosts = serializers.ListField(
        child=BulkHostSerializer(),
        allow_empty=False,
        max_length=100000,
        write_only=True,
        help_text=_('List of hosts to be created, JSON. e.g. [{"name": "example.com"}, {"name": "127.0.0.1"}]'),
    )

    class Meta:
        model = Inventory
        fields = ('inventory', 'hosts')
        read_only_fields = ()

    def raise_if_host_counts_violated(self, attrs):
        validation_info = get_licenser().validate()

        org = attrs['inventory'].organization
        new_hosts = [h['name'] for h in attrs['hosts']]

        if org:
            org_active_count = Host.objects.org_active_count(org.id)
            org_net_new_host_count = len(new_hosts) - Host.objects.filter(inventory__organization=org.id, name__in=new_hosts).values('name').distinct().count()
            if org.max_hosts > 0 and org_active_count + org_net_new_host_count > org.max_hosts:
                raise PermissionDenied(
                    _(
                        "You have already reached the maximum number of %s hosts"
                        " allowed for your organization. Contact your System Administrator"
                        " for assistance." % org.max_hosts
                    )
                )

        # Don't check license if it is open license
        if validation_info.get('license_type', 'UNLICENSED') == 'open':
            return

        sys_free_instances = validation_info.get('free_instances', 0)
        system_net_new_host_count = Host.objects.exclude(name__in=new_hosts).count()

        if system_net_new_host_count > sys_free_instances:
            hard_error = validation_info.get('trial', False) is True or validation_info['instance_count'] == 10
            if hard_error:
                # Only raise permission error for trial, otherwise just log a warning as we do in other inventory import situations
                raise PermissionDenied(_("Host count exceeds available instances."))
            logger.warning(_("Number of hosts allowed by license has been exceeded."))

    def validate(self, attrs):
        request = self.context.get('request', None)
        inv = attrs['inventory']
        if inv.kind != '':
            raise serializers.ValidationError(_('Hosts can only be created in manual inventories (not smart or constructed types).'))
        if len(attrs['hosts']) > settings.BULK_HOST_MAX_CREATE:
            raise serializers.ValidationError(_('Number of hosts exceeds system setting BULK_HOST_MAX_CREATE'))
        if request and not request.user.is_superuser:
            if request.user not in inv.admin_role:
                raise serializers.ValidationError(_(f'Inventory with id {inv.id} not found or lack permissions to add hosts.'))
        current_hostnames = set(inv.hosts.values_list('name', flat=True))
        new_names = [host['name'] for host in attrs['hosts']]
        duplicate_new_names = [n for n in new_names if n in current_hostnames or new_names.count(n) > 1]
        if duplicate_new_names:
            raise serializers.ValidationError(_(f'Hostnames must be unique in an inventory. Duplicates found: {duplicate_new_names}'))

        self.raise_if_host_counts_violated(attrs)

        _now = now()
        for host in attrs['hosts']:
            host['created'] = _now
            host['modified'] = _now
            host['inventory'] = inv
        return attrs

    def create(self, validated_data):
        # This assumes total_hosts is up to date, and it can get out of date if the inventory computed fields have not been updated lately.
        # If we wanted to side step this we could query Hosts.objects.filter(inventory...)
        old_total_hosts = validated_data['inventory'].total_hosts
        result = [Host(**attrs) for attrs in validated_data['hosts']]
        try:
            Host.objects.bulk_create(result)
        except Exception as e:
            raise serializers.ValidationError({"detail": _(f"cannot create host, host creation error {e}")})
        new_total_hosts = old_total_hosts + len(result)
        request = self.context.get('request', None)
        changes = {'total_hosts': [old_total_hosts, new_total_hosts]}
        activity_entry = ActivityStream.objects.create(
            operation='update',
            object1='inventory',
            changes=json.dumps(changes),
            actor=request.user,
        )
        activity_entry.inventory.add(validated_data['inventory'])

        # This actually updates the cached "total_hosts" field on the inventory
        update_inventory_computed_fields.delay(validated_data['inventory'].id)
        return_keys = [k for k in BulkHostSerializer().fields.keys()] + ['id']
        return_data = {}
        host_data = []
        for r in result:
            item = {k: getattr(r, k) for k in return_keys}
            if not settings.IS_TESTING_MODE:
                # sqlite acts different with bulk_create -- it doesn't return the id of the objects
                # to get it, you have to do an additional query, which is not useful for our tests
                item['url'] = reverse('api:host_detail', kwargs={'pk': r.id})
                item['inventory'] = reverse('api:inventory_detail', kwargs={'pk': validated_data['inventory'].id})
            host_data.append(item)
        return_data['url'] = reverse('api:inventory_detail', kwargs={'pk': validated_data['inventory'].id})
        return_data['hosts'] = host_data
        return return_data

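To make the max_hosts arithmetic in `raise_if_host_counts_violated` concrete, here is a small worked example of the net-new computation (numbers are illustrative):

    # Illustrative: the org already has 98 active hosts and max_hosts = 100.
    org_active_count = 98
    max_hosts = 100
    new_hosts = ['a.example.com', 'b.example.com', 'c.example.com', 'd.example.com']
    already_in_org = 1  # requested names that already exist under the org, so not net-new

    org_net_new_host_count = len(new_hosts) - already_in_org       # 3
    assert org_active_count + org_net_new_host_count > max_hosts   # 101 > 100 -> PermissionDenied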

class GroupTreeSerializer(GroupSerializer):
    children = serializers.SerializerMethodField()

    class Meta:
@@ -2023,6 +2128,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
            'source',
            'source_path',
            'source_vars',
+            'scm_branch',
            'credential',
            'enabled_var',
            'enabled_value',
@@ -2066,7 +2172,6 @@ class InventorySourceOptionsSerializer(BaseSerializer):


class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer):

    status = serializers.ChoiceField(choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True)
    last_update_failed = serializers.BooleanField(read_only=True)
    last_updated = serializers.DateTimeField(read_only=True)
@@ -2188,10 +2293,14 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
            if ('source' in attrs or 'source_project' in attrs) and get_field_from_model_or_attrs('source_project') is None:
                raise serializers.ValidationError({"source_project": _("Project required for scm type sources.")})
        else:
-            redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path']))
+            redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path', 'scm_branch']))
            if redundant_scm_fields:
                raise serializers.ValidationError({"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))})

+        project = get_field_from_model_or_attrs('source_project')
+        if get_field_from_model_or_attrs('scm_branch') and not project.allow_override:
+            raise serializers.ValidationError({'scm_branch': _('Project does not allow overriding branch.')})
+
        attrs = super(InventorySourceSerializer, self).validate(attrs)

        # Check type consistency of source and cloud credential, if provided
@@ -2211,15 +2320,22 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt


class InventorySourceUpdateSerializer(InventorySourceSerializer):

    can_update = serializers.BooleanField(read_only=True)

    class Meta:
        fields = ('can_update',)

+    def validate(self, attrs):
+        project = self.instance.source_project
+        if project:
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                raise serializers.ValidationError(failed_reason)
+
+        return super(InventorySourceUpdateSerializer, self).validate(attrs)


class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

    custom_virtualenv = serializers.ReadOnlyField()

    class Meta:
@@ -2260,7 +2376,6 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri


class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):

    source_project = serializers.SerializerMethodField(help_text=_('The project used for this job.'), method_name='get_source_project_id')

    class Meta:
@@ -2311,7 +2426,6 @@ class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSer


class InventoryUpdateCancelSerializer(InventoryUpdateSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -2669,7 +2783,6 @@ class CredentialSerializer(BaseSerializer):


class CredentialSerializerCreate(CredentialSerializer):

    user = serializers.PrimaryKeyRelatedField(
        queryset=User.objects.all(),
        required=False,
@@ -3024,7 +3137,6 @@ class JobTemplateWithSpecSerializer(JobTemplateSerializer):


class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):

    passwords_needed_to_start = serializers.ReadOnlyField()
    artifacts = serializers.SerializerMethodField()

@@ -3107,7 +3219,6 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):


class JobDetailSerializer(JobSerializer):

    playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
    custom_virtualenv = serializers.ReadOnlyField()

@@ -3125,7 +3236,6 @@ class JobDetailSerializer(JobSerializer):


class JobCancelSerializer(BaseSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -3134,7 +3244,6 @@ class JobCancelSerializer(BaseSerializer):


class JobRelaunchSerializer(BaseSerializer):

    passwords_needed_to_start = serializers.SerializerMethodField()
    retry_counts = serializers.SerializerMethodField()
    hosts = serializers.ChoiceField(
@@ -3194,7 +3303,6 @@ class JobRelaunchSerializer(BaseSerializer):


class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer):

    can_schedule = serializers.SerializerMethodField()
    prompts = serializers.SerializerMethodField()

@@ -3320,7 +3428,6 @@ class AdHocCommandDetailSerializer(AdHocCommandSerializer):


class AdHocCommandCancelSerializer(AdHocCommandSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -3359,7 +3466,6 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):


class SystemJobSerializer(UnifiedJobSerializer):

    result_stdout = serializers.SerializerMethodField()

    class Meta:
@@ -3386,7 +3492,6 @@ class SystemJobSerializer(UnifiedJobSerializer):


class SystemJobCancelSerializer(SystemJobSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -3551,7 +3656,6 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer)


class WorkflowJobCancelSerializer(WorkflowJobSerializer):

    can_cancel = serializers.BooleanField(read_only=True)

    class Meta:
@@ -3565,7 +3669,6 @@ class WorkflowApprovalViewSerializer(UnifiedJobSerializer):


class WorkflowApprovalSerializer(UnifiedJobSerializer):

    can_approve_or_deny = serializers.SerializerMethodField()
    approval_expiration = serializers.SerializerMethodField()
    timed_out = serializers.ReadOnlyField()
@@ -3746,7 +3849,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):

        # Build unsaved version of this config, use it to detect prompts errors
        mock_obj = self._build_mock_obj(attrs)
-        accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
+        if set(list(ujt.get_ask_mapping().keys()) + ['extra_data']) & set(attrs.keys()):
+            accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
+        else:
+            # Only perform validation of prompts if prompts fields are provided
+            errors = {}

        # Remove all unprocessed $encrypted$ strings, indicating default usage
        if 'extra_data' in attrs and password_dict:
@@ -3956,7 +4063,6 @@ class JobHostSummarySerializer(BaseSerializer):


class JobEventSerializer(BaseSerializer):

    event_display = serializers.CharField(source='get_event_display2', read_only=True)
    event_level = serializers.IntegerField(read_only=True)

@@ -4010,7 +4116,7 @@ class JobEventSerializer(BaseSerializer):
        # Show full stdout for playbook_on_* events.
        if obj and obj.event.startswith('playbook_on'):
            return data
-        # If the view logic says to not trunctate (request was to the detail view or a param was used)
+        # If the view logic says to not truncate (request was to the detail view or a param was used)
        if self.context.get('no_truncate', False):
            return data
        max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
@@ -4041,7 +4147,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
        # raw SCM URLs in their stdout (which *could* contain passwords)
        # attempt to detect and filter HTTP basic auth passwords in the stdout
        # of these types of events
-        if obj.event_data.get('task_action') in ('git', 'svn'):
+        if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
            try:
                return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
            except Exception:
@@ -4052,7 +4158,6 @@ class ProjectUpdateEventSerializer(JobEventSerializer):


class AdHocCommandEventSerializer(BaseSerializer):

    event_display = serializers.CharField(source='get_event_display', read_only=True)

    class Meta:
@@ -4086,7 +4191,7 @@ class AdHocCommandEventSerializer(BaseSerializer):

    def to_representation(self, obj):
        data = super(AdHocCommandEventSerializer, self).to_representation(obj)
-        # If the view logic says to not trunctate (request was to the detail view or a param was used)
+        # If the view logic says to not truncate (request was to the detail view or a param was used)
        if self.context.get('no_truncate', False):
            return data
        max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
@@ -4264,17 +4369,10 @@ class JobLaunchSerializer(BaseSerializer):
        # Basic validation - cannot run a playbook without a playbook
        if not template.project:
            errors['project'] = _("A project is required to run a job.")
-        elif template.project.status in ('error', 'failed'):
-            errors['playbook'] = _("Missing a revision to run due to failed project update.")
-
-        latest_update = template.project.project_updates.last()
-        if latest_update is not None and latest_update.failed:
-            failed_validation_tasks = latest_update.project_update_events.filter(
-                event='runner_on_failed',
-                play="Perform project signature/checksum verification",
-            )
-            if failed_validation_tasks:
-                errors['playbook'] = _("Last project update failed due to signature validation failure.")
+        else:
+            failure_reason = template.project.get_reason_if_failed()
+            if failure_reason:
+                errors['playbook'] = failure_reason

        # cannot run a playbook without an inventory
        if template.inventory and template.inventory.pending_deletion is True:
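The inline project-status checks removed above (and the equivalent checks in `InventorySourceUpdateSerializer.validate` earlier) are consolidated into a single `Project.get_reason_if_failed()` helper. Its body is not shown in this diff; based on the logic it replaces, a plausible shape is roughly the following (a hedged reconstruction, not the committed implementation):

    def get_reason_if_failed(self):
        # Return a user-facing error if the last update left no usable revision, else None.
        if self.status not in ('error', 'failed'):
            return None
        latest_update = self.project_updates.last()
        if latest_update is not None and latest_update.failed:
            failed_validation_tasks = latest_update.project_update_events.filter(
                event='runner_on_failed',
                play="Perform project signature/checksum verification",
            )
            if failed_validation_tasks:
                return _("Last project update failed due to signature validation failure.")
        return _("Missing a revision to run due to failed project update.")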
@@ -4341,7 +4439,6 @@ class JobLaunchSerializer(BaseSerializer):


class WorkflowJobLaunchSerializer(BaseSerializer):

    can_start_without_user_input = serializers.BooleanField(read_only=True)
    defaults = serializers.SerializerMethodField()
    variables_needed_to_start = serializers.ReadOnlyField()
@@ -4398,7 +4495,6 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
        return False

    def get_defaults(self, obj):

        defaults_dict = {}
        for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
            if field_name == 'inventory':
@@ -4415,7 +4511,6 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
        return dict(name=obj.name, id=obj.id, description=obj.description)

    def validate(self, attrs):

        template = self.instance

        accepted, rejected, errors = template._accept_or_ignore_job_kwargs(**attrs)
@@ -4443,6 +4538,271 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
        return accepted

class BulkJobNodeSerializer(WorkflowJobNodeSerializer):
    # We don't use a PrimaryKeyRelatedField for unified_job_template and the other relations, because that increases
    # the number of database queries; rather, we take them as integers and later convert them to objects in get_objectified_jobs
    unified_job_template = serializers.IntegerField(
        required=True, min_value=1, help_text=_('Primary key of the template for this job, can be a job template or inventory source.')
    )
    inventory = serializers.IntegerField(required=False, min_value=1)
    execution_environment = serializers.IntegerField(required=False, min_value=1)
    # many-to-many fields
    credentials = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
    labels = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
    # TODO: Use instance group role added via PR 13584 (once merged); for now everything related to instance groups is commented
    # instance_groups = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)

    class Meta:
        model = WorkflowJobNode
        fields = ('*', 'credentials', 'labels')  # m2m fields are not canonical for WJ nodes, TODO: add instance_groups once supported

    def validate(self, attrs):
        return super(LaunchConfigurationBaseSerializer, self).validate(attrs)

    def get_validation_exclusions(self, obj=None):
        ret = super().get_validation_exclusions(obj)
        ret.extend(['unified_job_template', 'inventory', 'execution_environment'])
        return ret
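Taking raw integers here and resolving them later keeps the endpoint at one query per related model instead of one per node. The core of that pattern, as implemented by `key_to_obj_map` and `get_objectified_jobs` below, boils down to this (illustrative sketch; `jobs` stands for the validated node list):

    # Gather every id referenced across all requested nodes, resolve the model
    # with a single IN query, then swap ids for objects in one pass.
    requested_ids = {job['unified_job_template'] for job in jobs}
    id_to_obj = {obj.id: obj for obj in UnifiedJobTemplate.objects.filter(id__in=requested_ids)}
    for job in jobs:
        job['unified_job_template'] = id_to_obj[job['unified_job_template']]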

class BulkJobLaunchSerializer(serializers.Serializer):
    name = serializers.CharField(default='Bulk Job Launch', max_length=512, write_only=True, required=False, allow_blank=True)  # limited by max name of jobs
    jobs = BulkJobNodeSerializer(
        many=True,
        allow_empty=False,
        write_only=True,
        max_length=100000,
        help_text=_('List of jobs to be launched, JSON. e.g. [{"unified_job_template": 7}, {"unified_job_template": 10}]'),
    )
    description = serializers.CharField(write_only=True, required=False, allow_blank=False)
    extra_vars = serializers.JSONField(write_only=True, required=False)
    organization = serializers.PrimaryKeyRelatedField(
        queryset=Organization.objects.all(),
        required=False,
        default=None,
        allow_null=True,
        write_only=True,
        help_text=_('Inherit permissions from this organization. If not provided, an organization the user is a member of will be selected automatically.'),
    )
    inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
    limit = serializers.CharField(write_only=True, required=False, allow_blank=False)
    scm_branch = serializers.CharField(write_only=True, required=False, allow_blank=False)
    skip_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
    job_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)

    class Meta:
        model = WorkflowJob
        fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'scm_branch', 'skip_tags', 'job_tags')
        read_only_fields = ()

    def validate(self, attrs):
        request = self.context.get('request', None)
        identifiers = set()
        if len(attrs['jobs']) > settings.BULK_JOB_MAX_LAUNCH:
            raise serializers.ValidationError(_('Number of requested jobs exceeds system setting BULK_JOB_MAX_LAUNCH'))

        for node in attrs['jobs']:
            if 'identifier' in node:
                if node['identifier'] in identifiers:
                    raise serializers.ValidationError(_(f"Identifier {node['identifier']} not unique"))
                identifiers.add(node['identifier'])
            else:
                node['identifier'] = str(uuid4())

        requested_ujts = {j['unified_job_template'] for j in attrs['jobs']}
        requested_use_inventories = {job['inventory'] for job in attrs['jobs'] if 'inventory' in job}
        requested_use_execution_environments = {job['execution_environment'] for job in attrs['jobs'] if 'execution_environment' in job}
        requested_use_credentials = set()
        requested_use_labels = set()
        # requested_use_instance_groups = set()
        for job in attrs['jobs']:
            for cred in job.get('credentials', []):
                requested_use_credentials.add(cred)
            for label in job.get('labels', []):
                requested_use_labels.add(label)
            # for instance_group in job.get('instance_groups', []):
            #     requested_use_instance_groups.add(instance_group)

        key_to_obj_map = {
            "unified_job_template": {obj.id: obj for obj in UnifiedJobTemplate.objects.filter(id__in=requested_ujts)},
            "inventory": {obj.id: obj for obj in Inventory.objects.filter(id__in=requested_use_inventories)},
            "credentials": {obj.id: obj for obj in Credential.objects.filter(id__in=requested_use_credentials)},
            "labels": {obj.id: obj for obj in Label.objects.filter(id__in=requested_use_labels)},
            # "instance_groups": {obj.id: obj for obj in InstanceGroup.objects.filter(id__in=requested_use_instance_groups)},
            "execution_environment": {obj.id: obj for obj in ExecutionEnvironment.objects.filter(id__in=requested_use_execution_environments)},
        }

        ujts = {}
        for ujt in key_to_obj_map['unified_job_template'].values():
            ujts.setdefault(type(ujt), [])
            ujts[type(ujt)].append(ujt)

        unallowed_types = set(ujts.keys()) - set([JobTemplate, Project, InventorySource, WorkflowJobTemplate])
        if unallowed_types:
            type_names = ' '.join([cls._meta.verbose_name.title() for cls in unallowed_types])
            raise serializers.ValidationError(_("Template types {type_names} not allowed in bulk jobs").format(type_names=type_names))

        for model, obj_list in ujts.items():
            role_field = 'execute_role' if issubclass(model, (JobTemplate, WorkflowJobTemplate)) else 'update_role'
            self.check_list_permission(model, set([obj.id for obj in obj_list]), role_field)

        self.check_organization_permission(attrs, request)

        if 'inventory' in attrs:
            requested_use_inventories.add(attrs['inventory'].id)

        self.check_list_permission(Inventory, requested_use_inventories, 'use_role')

        self.check_list_permission(Credential, requested_use_credentials, 'use_role')
        self.check_list_permission(Label, requested_use_labels)
        # self.check_list_permission(InstanceGroup, requested_use_instance_groups)  # TODO: change to use_role for conflict
        self.check_list_permission(ExecutionEnvironment, requested_use_execution_environments)  # TODO: change if roles introduced

        jobs_object = self.get_objectified_jobs(attrs, key_to_obj_map)

        attrs['jobs'] = jobs_object
        if 'extra_vars' in attrs:
            extra_vars_dict = parse_yaml_or_json(attrs['extra_vars'])
            attrs['extra_vars'] = json.dumps(extra_vars_dict)
        attrs = super().validate(attrs)
        return attrs

    def check_list_permission(self, model, id_list, role_field=None):
        if not id_list:
            return
        user = self.context['request'].user
        if role_field is None:  # implies "read" level permission is required
            access_qs = user.get_queryset(model)
        else:
            access_qs = model.accessible_objects(user, role_field)

        not_allowed = set(id_list) - set(access_qs.filter(id__in=id_list).values_list('id', flat=True))
        if not_allowed:
            raise serializers.ValidationError(
                _("{model_name} {not_allowed} not found or you don't have permissions to access it").format(
                    model_name=model._meta.verbose_name_plural.title(), not_allowed=not_allowed
                )
            )

    def create(self, validated_data):
        request = self.context.get('request', None)
        launch_user = request.user if request else None
        job_node_data = validated_data.pop('jobs')
        wfj_deferred_attr_names = ('skip_tags', 'limit', 'job_tags')
        wfj_deferred_vals = {}
        for item in wfj_deferred_attr_names:
            wfj_deferred_vals[item] = validated_data.pop(item, None)

        wfj = WorkflowJob.objects.create(**validated_data, is_bulk_job=True, launch_type='manual', created_by=launch_user)
        for key, val in wfj_deferred_vals.items():
            if val:
                setattr(wfj, key, val)
        nodes = []
        node_m2m_objects = {}
        node_m2m_object_types_to_through_model = {
            'credentials': WorkflowJobNode.credentials.through,
            'labels': WorkflowJobNode.labels.through,
            # 'instance_groups': WorkflowJobNode.instance_groups.through,
        }
        node_deferred_attr_names = (
            'limit',
            'scm_branch',
            'verbosity',
            'forks',
            'diff_mode',
            'job_tags',
            'job_type',
            'skip_tags',
            'job_slice_count',
            'timeout',
        )
        node_deferred_attrs = {}
        for node_attrs in job_node_data:
            # we need to add any m2m objects after creation via the through model
            node_m2m_objects[node_attrs['identifier']] = {}
            node_deferred_attrs[node_attrs['identifier']] = {}
            for item in node_m2m_object_types_to_through_model.keys():
                if item in node_attrs:
                    node_m2m_objects[node_attrs['identifier']][item] = node_attrs.pop(item)

            # Some attributes are not accepted by WorkflowJobNode __init__, we have to set them after
            for item in node_deferred_attr_names:
                if item in node_attrs:
                    node_deferred_attrs[node_attrs['identifier']][item] = node_attrs.pop(item)

            # Create the node objects
            node_obj = WorkflowJobNode(workflow_job=wfj, created=wfj.created, modified=wfj.modified, **node_attrs)

            # we can set the deferred attrs now
            for item, value in node_deferred_attrs[node_attrs['identifier']].items():
                setattr(node_obj, item, value)

            # the node is now ready to be bulk created
            nodes.append(node_obj)

            # we'll need this later when we do the m2m through model bulk create
            node_m2m_objects[node_attrs['identifier']]['node'] = node_obj

        WorkflowJobNode.objects.bulk_create(nodes)

        # Deal with the m2m objects we have to create once the node exists
        for field_name, through_model in node_m2m_object_types_to_through_model.items():
            through_model_objects = []
            for node_identifier in node_m2m_objects.keys():
                if field_name in node_m2m_objects[node_identifier] and field_name == 'credentials':
                    for cred in node_m2m_objects[node_identifier][field_name]:
                        through_model_objects.append(through_model(credential=cred, workflowjobnode=node_m2m_objects[node_identifier]['node']))
                if field_name in node_m2m_objects[node_identifier] and field_name == 'labels':
                    for label in node_m2m_objects[node_identifier][field_name]:
                        through_model_objects.append(through_model(label=label, workflowjobnode=node_m2m_objects[node_identifier]['node']))
                # if field_name in node_m2m_objects[node_identifier] and field_name == 'instance_groups':
                #     for instance_group in node_m2m_objects[node_identifier][field_name]:
                #         through_model_objects.append(through_model(instancegroup=instance_group, workflowjobnode=node_m2m_objects[node_identifier]['node']))
            if through_model_objects:
                through_model.objects.bulk_create(through_model_objects)

        wfj.save()
        wfj.signal_start()

        return WorkflowJobSerializer().to_representation(wfj)

    def check_organization_permission(self, attrs, request):
        # validate Organization
        # - If the org is not set, set it to an org of the launching user
        # - If the user is part of multiple orgs, throw a validation error asking them to provide one
        if not request.user.is_superuser:
            read_org_qs = Organization.accessible_objects(request.user, 'member_role')
            if 'organization' not in attrs or attrs['organization'] is None or attrs['organization'] == '':
                read_org_ct = read_org_qs.count()
                if read_org_ct == 1:
                    attrs['organization'] = read_org_qs.first()
                elif read_org_ct > 1:
                    raise serializers.ValidationError("User has permission to multiple Organizations, please set one of them in the request")
                else:
                    raise serializers.ValidationError("User is not part of any organization, please provide an organization to assign to the bulk job")
            else:
                allowed_orgs = set(read_org_qs.values_list('id', flat=True))
                requested_org = attrs['organization']
                if requested_org.id not in allowed_orgs:
                    raise ValidationError(_(f"Organization {requested_org.id} not found or you don't have permissions to access it"))

    def get_objectified_jobs(self, attrs, key_to_obj_map):
        objectified_jobs = []
        # This loop is generalized so we should only have to add related items to the key_to_obj_map
        for job in attrs['jobs']:
            objectified_job = {}
            for key, value in job.items():
                if key in key_to_obj_map:
                    if isinstance(value, int):
                        objectified_job[key] = key_to_obj_map[key][value]
                    elif isinstance(value, list):
                        objectified_job[key] = [key_to_obj_map[key][item] for item in value]
                else:
                    objectified_job[key] = value
            objectified_jobs.append(objectified_job)
        return objectified_jobs

class NotificationTemplateSerializer(BaseSerializer):
    show_capabilities = ['edit', 'delete', 'copy']
    capabilities_prefetch = [{'copy': 'organization.admin'}]
@@ -4656,7 +5016,6 @@ class NotificationTemplateSerializer(BaseSerializer):


class NotificationSerializer(BaseSerializer):

    body = serializers.SerializerMethodField(help_text=_('Notification body'))

    class Meta:
@@ -4790,7 +5149,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
        ),
    )
    until = serializers.SerializerMethodField(
-        help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an emptry string will be returned'),
+        help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an empty string will be returned'),
    )

    class Meta:
@@ -4921,6 +5280,19 @@ class InstanceSerializer(BaseSerializer):
        extra_kwargs = {
            'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
            'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED},
+            'hostname': {
+                'validators': [
+                    MaxLengthValidator(limit_value=250),
+                    validators.UniqueValidator(queryset=Instance.objects.all()),
+                    RegexValidator(
+                        regex=r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$',
+                        flags=re.IGNORECASE,
+                        inverse_match=True,
+                        message="hostname cannot be localhost or 127.0.0.1",
+                    ),
+                    HostnameRegexValidator(),
+                ],
+            },
        }

    def get_related(self, obj):
@@ -4931,7 +5303,7 @@ class InstanceSerializer(BaseSerializer):
        res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
        res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
-            if obj.node_type != 'hop':
+            if obj.node_type == 'execution':
                res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
        return res

@@ -4991,6 +5363,10 @@ class InstanceSerializer(BaseSerializer):
        return value

    def validate_hostname(self, value):
        """
        - Hostname cannot be "localhost" - but can be something like localhost.domain
        - Cannot change the hostname of an already-instantiated & initialized Instance object
        """
        if self.instance and self.instance.hostname != value:
            raise serializers.ValidationError("Cannot change hostname.")

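The loopback regex attached to `hostname` above works together with `inverse_match=True`, so a value that matches the pattern is rejected. A quick sanity check of its behavior (illustrative, consistent with the docstring's claim that `localhost.domain` is allowed):

    import re

    # Same pattern as the RegexValidator above; inverse_match=True rejects matches.
    LOOPBACK = re.compile(r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$', re.IGNORECASE)

    assert LOOPBACK.match('localhost')             # rejected as a hostname
    assert LOOPBACK.match('127.0.0.1')             # rejected
    assert LOOPBACK.match('::1')                   # rejected (IPv6 loopback)
    assert not LOOPBACK.match('localhost.domain')  # allowed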
@@ -5011,14 +5387,11 @@ class InstanceHealthCheckSerializer(BaseSerializer):


class InstanceGroupSerializer(BaseSerializer):

    show_capabilities = ['edit', 'delete']

    capacity = serializers.SerializerMethodField()
    consumed_capacity = serializers.SerializerMethodField()
    percent_capacity_remaining = serializers.SerializerMethodField()
-    jobs_running = serializers.IntegerField(
-        help_text=_('Count of jobs in the running or waiting state that are targeted for this instance group'), read_only=True
-    )
+    jobs_running = serializers.SerializerMethodField()
    jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance group'), read_only=True)
    instances = serializers.SerializerMethodField()
    is_container_group = serializers.BooleanField(
@@ -5044,6 +5417,22 @@ class InstanceGroupSerializer(BaseSerializer):
        label=_('Policy Instance Minimum'),
        help_text=_("Static minimum number of Instances that will be automatically assigned to this group when new instances come online."),
    )
+    max_concurrent_jobs = serializers.IntegerField(
+        default=0,
+        min_value=0,
+        required=False,
+        initial=0,
+        label=_('Max Concurrent Jobs'),
+        help_text=_("Maximum number of concurrent jobs to run on a group. When set to zero, no maximum is enforced."),
+    )
+    max_forks = serializers.IntegerField(
+        default=0,
+        min_value=0,
+        required=False,
+        initial=0,
+        label=_('Max Forks'),
+        help_text=_("Maximum number of forks to execute concurrently on a group. When set to zero, no maximum is enforced."),
+    )
    policy_instance_list = serializers.ListField(
        child=serializers.CharField(),
        required=False,
@@ -5065,6 +5454,8 @@ class InstanceGroupSerializer(BaseSerializer):
            "consumed_capacity",
            "percent_capacity_remaining",
            "jobs_running",
+            "max_concurrent_jobs",
+            "max_forks",
            "jobs_total",
            "instances",
            "is_container_group",
@@ -5080,6 +5471,8 @@ class InstanceGroupSerializer(BaseSerializer):
        res = super(InstanceGroupSerializer, self).get_related(obj)
        res['jobs'] = self.reverse('api:instance_group_unified_jobs_list', kwargs={'pk': obj.pk})
        res['instances'] = self.reverse('api:instance_group_instance_list', kwargs={'pk': obj.pk})
+        res['access_list'] = self.reverse('api:instance_group_access_list', kwargs={'pk': obj.pk})
+        res['object_roles'] = self.reverse('api:instance_group_object_role_list', kwargs={'pk': obj.pk})
        if obj.credential:
            res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential_id})

@@ -5146,32 +5539,42 @@ class InstanceGroupSerializer(BaseSerializer):
        # Store capacity values (globally computed) in the context
        if 'task_manager_igs' not in self.context:
            instance_groups_queryset = None
-            jobs_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
            if self.parent:  # Is ListView:
                instance_groups_queryset = self.parent.instance

-            instances = TaskManagerInstances(jobs_qs)
-            instance_groups = TaskManagerInstanceGroups(instances_by_hostname=instances, instance_groups_queryset=instance_groups_queryset)
+            tm_models = TaskManagerModels.init_with_consumed_capacity(
+                instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'],
+                instance_groups_queryset=instance_groups_queryset,
+            )

-            self.context['task_manager_igs'] = instance_groups
+            self.context['task_manager_igs'] = tm_models.instance_groups
        return self.context['task_manager_igs']

    def get_consumed_capacity(self, obj):
        ig_mgr = self.get_ig_mgr()
        return ig_mgr.get_consumed_capacity(obj.name)

-    def get_percent_capacity_remaining(self, obj):
-        if not obj.capacity:
-            return 0.0
+    def get_capacity(self, obj):
        ig_mgr = self.get_ig_mgr()
-        return float("{0:.2f}".format((float(ig_mgr.get_remaining_capacity(obj.name)) / (float(obj.capacity))) * 100))
+        return ig_mgr.get_capacity(obj.name)

+    def get_percent_capacity_remaining(self, obj):
+        capacity = self.get_capacity(obj)
+        if not capacity:
+            return 0.0
+        consumed_capacity = self.get_consumed_capacity(obj)
+        return float("{0:.2f}".format(((float(capacity) - float(consumed_capacity)) / (float(capacity))) * 100))

    def get_instances(self, obj):
-        return obj.instances.count()
+        ig_mgr = self.get_ig_mgr()
+        return len(ig_mgr.get_instances(obj.name))

+    def get_jobs_running(self, obj):
+        ig_mgr = self.get_ig_mgr()
+        return ig_mgr.get_jobs_running(obj.name)

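`percent_capacity_remaining` is now derived from the globally computed capacity figures instead of the model's `capacity` field. A quick worked example of the formula (values are illustrative):

    capacity = 120.0          # total capacity reported for the group
    consumed_capacity = 30.0  # capacity consumed by running/waiting jobs

    percent_remaining = float("{0:.2f}".format(((capacity - consumed_capacity) / capacity) * 100))
    assert percent_remaining == 75.0  # (120 - 30) / 120 * 100 = 75%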

class ActivityStreamSerializer(BaseSerializer):

    changes = serializers.SerializerMethodField()
    object_association = serializers.SerializerMethodField(help_text=_("When present, shows the field name of the role or relationship that changed."))
    object_type = serializers.SerializerMethodField(help_text=_("When present, shows the model on which the role or relationship was defined."))

@@ -1,4 +0,0 @@
-Version 1 of the Ansible Tower REST API.
-
-Make a GET request to this resource to obtain a list of all child resources
-available via the API.
@@ -7,10 +7,12 @@ the following fields (some fields may not be visible to all users):
* `project_base_dir`: Path on the server where projects and playbooks are \
  stored.
* `project_local_paths`: List of directories beneath `project_base_dir` to
-  use when creating/editing a project.
+  use when creating/editing a manual project.
* `time_zone`: The configured time zone for the server.
* `license_info`: Information about the current license.
* `version`: Version of Ansible Tower package installed.
+* `custom_virtualenvs`: Deprecated venv locations from before migration to
+  execution environments. Export tooling is in `awx-manage` commands.
* `eula`: The current End-User License Agreement
{% endifmeth %}

awx/api/templates/api/bulk_host_create_view.md (new file, 41 lines)
@@ -0,0 +1,41 @@
# Bulk Host Create

This endpoint allows the client to create multiple hosts and associate them with an inventory. They may do this by providing the inventory ID and a list of JSON host objects that would normally be provided to create hosts.

Example:

    {
        "inventory": 1,
        "hosts": [
            {"name": "example1.com", "variables": "ansible_connection: local"},
            {"name": "example2.com"}
        ]
    }

Return data:

    {
        "url": "/api/v2/inventories/3/hosts/",
        "hosts": [
            {
                "name": "example1.com",
                "enabled": true,
                "instance_id": "",
                "description": "",
                "variables": "ansible_connection: local",
                "id": 1255,
                "url": "/api/v2/hosts/1255/",
                "inventory": "/api/v2/inventories/3/"
            },
            {
                "name": "example2.com",
                "enabled": true,
                "instance_id": "",
                "description": "",
                "variables": "",
                "id": 1256,
                "url": "/api/v2/hosts/1256/",
                "inventory": "/api/v2/inventories/3/"
            }
        ]
    }
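For reference, the request above can be issued from Python along these lines (host, token, and the exact endpoint path are illustrative; the bulk routes are registered in the URL configuration further down):

    import requests

    # Host, token, and endpoint path are illustrative; adjust for your install.
    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/host_create/",
        json={"inventory": 1, "hosts": [{"name": "example1.com"}, {"name": "example2.com"}]},
        headers={"Authorization": "Bearer <token>"},
    )
    resp.raise_for_status()
    print(resp.json()["hosts"])  # per-host name/enabled/.../id/url, as shown above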
awx/api/templates/api/bulk_job_launch_view.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# Bulk Job Launch

This endpoint allows the client to launch multiple UnifiedJobTemplates at a time, alongside any launch-time parameters that they would normally set at launch time.

Example:

    {
        "name": "my bulk job",
        "jobs": [
            {"unified_job_template": 7, "inventory": 2},
            {"unified_job_template": 7, "credentials": [3]}
        ]
    }
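A hedged client-side sketch of posting that payload (base URL, token, and the endpoint path are illustrative; the bulk routes are registered in the URL configuration below):

    import requests

    payload = {
        "name": "my bulk job",
        "jobs": [
            {"unified_job_template": 7, "inventory": 2},
            {"unified_job_template": 7, "credentials": [3]},
        ],
    }

    # The server wraps the launched jobs in a single WorkflowJob (is_bulk_job=True),
    # so the response body is a standard workflow job representation.
    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/job_launch/",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
    resp.raise_for_status()
    print(resp.json()["id"])  # id of the wrapping workflow job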
awx/api/templates/api/bulk_view.md (new file, 3 lines)
@@ -0,0 +1,3 @@
# Bulk Actions

This endpoint lists available bulk action APIs.
@@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inven
Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or
failed status of hosts which have run jobs within an Inventory.

-## Parmeters and Filtering
+## Parameters and Filtering

The `period` of the data can be adjusted with:

@@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format:
Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of hosts that exist at a given moment

-Data about failed and successfull hosts by inventory will be given as:
+Data about failed and successful hosts by inventory will be given as:

    {
        "sources": [

@@ -2,7 +2,7 @@

Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

-## Parmeters and Filtering
+## Parameters and Filtering

The `period` of the data can be adjusted with:

@@ -1,11 +0,0 @@
-# List Fact Scans for a Host Specific Host Scan
-
-Make a GET request to this resource to retrieve system tracking data for a particular scan
-
-You may filter by datetime:
-
-`?datetime=2015-06-01`
-
-and module
-
-`?datetime=2015-06-01&module=ansible`
@@ -1,11 +0,0 @@
-# List Fact Scans for a Host by Module and Date
-
-Make a GET request to this resource to retrieve system tracking scans by module and date/time
-
-You may filter scan runs using the `from` and `to` properties:
-
-`?from=2015-06-01%2012:00:00&to=2015-06-03`
-
-You may also filter by module
-
-`?module=packages`
@@ -1 +0,0 @@
-# List Red Hat Insights for a Host
@@ -18,7 +18,7 @@ inventory sources:
* `inventory_update`: ID of the inventory update job that was started.
  (integer, read-only)
* `project_update`: ID of the project update job that was started if this inventory source is an SCM source.
-  (interger, read-only, optional)
+  (integer, read-only, optional)

Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories.

@@ -1,21 +0,0 @@
-{% ifmeth GET %}
-# Determine if a Job can be started
-
-Make a GET request to this resource to determine if the job can be started and
-whether any passwords are required to start the job. The response will include
-the following fields:
-
-* `can_start`: Flag indicating if this job can be started (boolean, read-only)
-* `passwords_needed_to_start`: Password names required to start the job (array,
-  read-only)
-{% endifmeth %}
-
-{% ifmeth POST %}
-# Start a Job
-Make a POST request to this resource to start the job. If any passwords are
-required, they must be passed via POST data.
-
-If successful, the response status code will be 202. If any required passwords
-are not provided, a 400 status code will be returned. If the job cannot be
-started, a 405 status code will be returned.
-{% endifmeth %}
@@ -1,5 +1,5 @@
Launch a Job Template:

+{% ifmeth GET %}
Make a GET request to this resource to determine if the job_template can be
launched and whether any passwords are required to launch the job_template.
The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
  associated with the job template. If not then one should be supplied when
  launching the job (boolean, read-only)

-Make a POST request to this resource to launch the job_template. If any
+{% endifmeth %}
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
passwords, inventory, or extra variables (extra_vars) are required, they must
be passed via POST data, with extra_vars given as a YAML or JSON string and
escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
launched, a 405 status code will be returned. If the provided credential or
inventory are not allowed to be used by the user, then a 403 status code will
be returned.
+{% endifmeth %}
@@ -1,3 +1,5 @@
+receptor_user: awx
+receptor_group: awx
receptor_verify: true
receptor_tls: true
receptor_work_commands:
@@ -10,12 +12,12 @@ custom_worksign_public_keyfile: receptor/work-public-key.pem
custom_tls_certfile: receptor/tls/receptor.crt
custom_tls_keyfile: receptor/tls/receptor.key
custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
-receptor_user: awx
-receptor_group: awx
receptor_protocol: 'tcp'
receptor_listener: true
receptor_port: {{ instance.listener_port }}
receptor_dependencies:
  - podman
  - crun
  - python39-pip
{% verbatim %}
podman_user: "{{ receptor_user }}"
podman_group: "{{ receptor_group }}"
{% endverbatim %}

@@ -9,10 +9,12 @@
    shell: /bin/bash
-  - name: Enable Copr repo for Receptor
-    command: dnf copr enable ansible-awx/receptor -y
+  - import_role:
+      name: ansible.receptor.podman
+  - import_role:
+      name: ansible.receptor.setup
  - name: Install ansible-runner
    pip:
      name: ansible-runner
      executable: pip3.9
-{% endverbatim %}
+{% endverbatim %}

@@ -1,6 +1,4 @@
---
collections:
  - name: ansible.receptor
-    source: https://github.com/ansible/receptor-collection/
-    type: git
-    version: 0.1.1
+    version: 1.1.0

@@ -9,9 +9,9 @@ from awx.api.views import (
    InstanceUnifiedJobsList,
    InstanceInstanceGroupsList,
    InstanceHealthCheck,
-    InstanceInstallBundle,
    InstancePeersList,
)
+from awx.api.views.instance_install_bundle import InstanceInstallBundle


urls = [

@@ -3,7 +3,14 @@

from django.urls import re_path

-from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList
+from awx.api.views import (
+    InstanceGroupList,
+    InstanceGroupDetail,
+    InstanceGroupUnifiedJobsList,
+    InstanceGroupInstanceList,
+    InstanceGroupAccessList,
+    InstanceGroupObjectRolesList,
+)


urls = [
@@ -11,6 +18,8 @@ urls = [
    re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
    re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'),
    re_path(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'),
+    re_path(r'^(?P<pk>[0-9]+)/access_list/$', InstanceGroupAccessList.as_view(), name='instance_group_access_list'),
+    re_path(r'^(?P<pk>[0-9]+)/object_roles/$', InstanceGroupObjectRolesList.as_view(), name='instance_group_object_role_list'),
]

__all__ = ['urls']

@@ -3,26 +3,28 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.inventory import (
    InventoryList,
    InventoryDetail,
    InventoryHostsList,
    InventoryGroupsList,
    InventoryRootGroupsList,
    InventoryVariableData,
    InventoryScriptView,
    InventoryTreeView,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryActivityStreamList,
    InventoryJobTemplateList,
    InventoryAdHocCommandsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryInstanceGroupsList,
    InventoryLabelList,
    InventoryCopy,
)
from awx.api.views import (
    InventoryHostsList,
    InventoryGroupsList,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryAdHocCommandsList,
    InventoryRootGroupsList,
    InventoryScriptView,
    InventoryTreeView,
    InventoryVariableData,
)


urls = [
@@ -3,6 +3,9 @@

from django.urls import re_path

from awx.api.views.inventory import (
    InventoryUpdateEventsList,
)
from awx.api.views import (
    InventoryUpdateList,
    InventoryUpdateDetail,

@@ -10,7 +13,6 @@ from awx.api.views import (
    InventoryUpdateStdout,
    InventoryUpdateNotificationsList,
    InventoryUpdateCredentialsList,
    InventoryUpdateEventsList,
)

@@ -10,7 +10,7 @@ from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views import ApiOAuthAuthorizationRootView
from awx.api.views.root import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):
@@ -3,7 +3,7 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.organization import (
    OrganizationList,
    OrganizationDetail,
    OrganizationUsersList,

@@ -14,7 +14,6 @@ from awx.api.views import (
    OrganizationJobTemplatesList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationCredentialList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesErrorList,

@@ -25,8 +24,8 @@ from awx.api.views import (
    OrganizationGalaxyCredentialsList,
    OrganizationObjectRolesList,
    OrganizationAccessList,
    OrganizationApplicationList,
)
from awx.api.views import OrganizationCredentialList, OrganizationApplicationList


urls = [
@@ -6,13 +6,15 @@ from django.urls import include, re_path

from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
from awx.api.views.root import (
    ApiRootView,
    ApiV2RootView,
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
    ApiV2AttachView,
)
from awx.api.views import (
    AuthView,
    UserMeList,
    DashboardView,

@@ -28,9 +30,16 @@ from awx.api.views import (
    OAuth2TokenList,
    ApplicationOAuth2TokenList,
    OAuth2ApplicationDetail,
    MeshVisualizer,
)

from awx.api.views.bulk import (
    BulkView,
    BulkHostCreateView,
    BulkJobLaunchView,
)

from awx.api.views.mesh_visualizer import MeshVisualizer

from awx.api.views.metrics import MetricsView

from .organization import urls as organization_urls

@@ -134,6 +143,9 @@ v2_urls = [
    re_path(r'^activity_stream/', include(activity_stream_urls)),
    re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)),
    re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
    re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
    re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
    re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
]

@@ -1,6 +1,6 @@
from django.urls import re_path

from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


urlpatterns = [
awx/api/validators.py (new file, 54 lines)

@@ -0,0 +1,54 @@
import re

from django.core.validators import RegexValidator, validate_ipv46_address
from django.core.exceptions import ValidationError


class HostnameRegexValidator(RegexValidator):
    """
    Fully validates a domain name that is compliant with norms in Linux/RHEL
    - Cannot start with a hyphen
    - Cannot begin with, or end with a "."
    - Cannot contain any whitespaces
    - Entire hostname is max 255 chars (including dots)
    - Each domain/label is between 1 and 63 characters, except top level domain, which must be at least 2 characters
    - Supports ipv4, ipv6, simple hostnames and FQDNs
    - Follows RFC 9210 (modern RFC 1123, 1178) requirements

    Accepts an IP Address or Hostname as the argument
    """

    regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
    flags = re.IGNORECASE

    def __call__(self, value):
        regex_matches, err = self.__validate(value)
        invalid_input = regex_matches if self.inverse_match else not regex_matches
        if invalid_input:
            if err is None:
                err = ValidationError(self.message, code=self.code, params={"value": value})
            raise err

    def __str__(self):
        return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"

    def __validate(self, value):
        if ' ' in value:
            return False, ValidationError("whitespaces in hostnames are illegal")

        """
        If we have an IP address, try and validate it.
        """
        try:
            validate_ipv46_address(value)
            return True, None
        except ValidationError:
            pass

        """
        By this point in the code, we probably have a simple hostname, FQDN or a strange hostname like "192.localhost.domain.101"
        """
        if not self.regex.match(value):
            return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")

        return True, None
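A quick usage sketch for the validator above (hypothetical, not part of the diff; assumes a configured Django environment such as AWX itself, since the default error message is a lazily translated string):

# Hypothetical usage of HostnameRegexValidator; illustrative only.
from django.core.exceptions import ValidationError

from awx.api.validators import HostnameRegexValidator

validate_hostname = HostnameRegexValidator()

for candidate in ("node1.example.com", "10.0.0.5", "bad host"):
    try:
        validate_hostname(candidate)  # raises ValidationError on illegal input
        print(f"{candidate}: ok")
    except ValidationError as exc:
        print(f"{candidate}: rejected ({exc.messages})")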
File diff suppressed because it is too large

awx/api/views/bulk.py (new file, 69 lines)

@@ -0,0 +1,69 @@
from collections import OrderedDict

from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.reverse import reverse
from rest_framework import status
from rest_framework.response import Response

from awx.main.models import UnifiedJob, Host
from awx.api.generics import (
    GenericAPIView,
    APIView,
)
from awx.api import (
    serializers,
    renderers,
)


class BulkView(APIView):
    permission_classes = [IsAuthenticated]
    renderer_classes = [
        renderers.BrowsableAPIRenderer,
        JSONRenderer,
    ]
    allowed_methods = ['GET', 'OPTIONS']

    def get(self, request, format=None):
        '''List top level resources'''
        data = OrderedDict()
        data['host_create'] = reverse('api:bulk_host_create', request=request)
        data['job_launch'] = reverse('api:bulk_job_launch', request=request)
        return Response(data)


class BulkJobLaunchView(GenericAPIView):
    permission_classes = [IsAuthenticated]
    model = UnifiedJob
    serializer_class = serializers.BulkJobLaunchSerializer
    allowed_methods = ['GET', 'POST', 'OPTIONS']

    def get(self, request):
        data = OrderedDict()
        data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
        return Response(data, status=status.HTTP_200_OK)

    def post(self, request):
        bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
        if bulkjob_serializer.is_valid():
            result = bulkjob_serializer.create(bulkjob_serializer.validated_data)
            return Response(result, status=status.HTTP_201_CREATED)
        return Response(bulkjob_serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class BulkHostCreateView(GenericAPIView):
    permission_classes = [IsAuthenticated]
    model = Host
    serializer_class = serializers.BulkHostCreateSerializer
    allowed_methods = ['GET', 'POST', 'OPTIONS']

    def get(self, request):
        return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)

    def post(self, request):
        serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
        if serializer.is_valid():
            result = serializer.create(serializer.validated_data)
            return Response(result, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
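The two POST views above delegate all validation to their serializers, so a client can create many hosts or launch many jobs in one round trip. A hypothetical client sketch follows; the endpoint paths come from the URL patterns registered earlier in this diff, while the payload shapes are assumptions inferred from the serializer names, not confirmed here:

# Hypothetical bulk API client; payload shapes are assumptions.
import requests

AWX_URL = "https://awx.example.com"  # assumed host
AUTH = ("admin", "password")         # assumed credentials

# One request instead of N single-host POSTs.
resp = requests.post(
    f"{AWX_URL}/api/v2/bulk/host_create/",
    auth=AUTH,
    json={"inventory": 1, "hosts": [{"name": "host-a"}, {"name": "host-b"}]},
)
print(resp.status_code, resp.json())

# Launch several unified job templates with one call.
resp = requests.post(
    f"{AWX_URL}/api/v2/bulk/job_launch/",
    auth=AUTH,
    json={"jobs": [{"unified_job_template": 7}, {"unified_job_template": 8}]},
)
print(resp.status_code, resp.json())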
@@ -25,6 +25,7 @@ from rest_framework import status
# Red Hat has an OID namespace (RHANANA). Receptor has its own designation under that.
RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"


# generate install bundle for the instance
# install bundle directory structure
# ├── install_receptor.yml (playbook)

@@ -40,7 +41,6 @@ RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
# │   └── work-public-key.pem
# └── requirements.yml
class InstanceInstallBundle(GenericAPIView):

    name = _('Install Bundle')
    model = models.Instance
    serializer_class = serializers.InstanceSerializer

@@ -178,7 +178,7 @@ def generate_receptor_tls(instance_obj):
        .public_key(csr.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=10))
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
        .add_extension(
            csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
            critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
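The last hunk extends the signed receptor certificate lifetime from 10 days to 3650. For reference, a minimal self-signed sketch with the cryptography library showing where not_valid_after() sits in the builder chain (illustrative only; the real code signs a CSR with the receptor CA rather than self-signing):

# Minimal self-signed certificate sketch; mirrors the lifetime change above.
import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "receptor-demo")])

cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed for the sketch; AWX signs with the receptor CA
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.datetime.utcnow())
    .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))  # ten years
    .sign(key, hashes.SHA256())
)
print(cert.not_valid_after)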
@@ -46,7 +46,6 @@ logger = logging.getLogger('awx.api.views.organization')


class InventoryUpdateEventsList(SubListAPIView):

    model = InventoryUpdateEvent
    serializer_class = InventoryUpdateEventSerializer
    parent_model = InventoryUpdate

@@ -66,13 +65,11 @@ class InventoryUpdateEventsList(SubListAPIView):


class InventoryList(ListCreateAPIView):

    model = Inventory
    serializer_class = InventorySerializer


class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):

    model = Inventory
    serializer_class = InventorySerializer

@@ -98,7 +95,6 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie


class InventoryActivityStreamList(SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer
    parent_model = Inventory

@@ -113,7 +109,6 @@ class InventoryActivityStreamList(SubListAPIView):


class InventoryInstanceGroupsList(SubListAttachDetachAPIView):

    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Inventory

@@ -121,13 +116,11 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):


class InventoryAccessList(ResourceAccessList):

    model = User  # needs to be User for AccessLists's
    parent_model = Inventory


class InventoryObjectRolesList(SubListAPIView):

    model = Role
    serializer_class = RoleSerializer
    parent_model = Inventory

@@ -140,7 +133,6 @@ class InventoryObjectRolesList(SubListAPIView):


class InventoryJobTemplateList(SubListAPIView):

    model = JobTemplate
    serializer_class = JobTemplateSerializer
    parent_model = Inventory

@@ -154,11 +146,9 @@ class InventoryJobTemplateList(SubListAPIView):


class InventoryLabelList(LabelSubListCreateAttachDetachView):

    parent_model = Inventory


class InventoryCopy(CopyAPIView):

    model = Inventory
    copy_return_serializer_class = InventorySerializer
@@ -59,13 +59,11 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):


class LabelDetail(RetrieveUpdateAPIView):

    model = Label
    serializer_class = LabelSerializer


class LabelList(ListCreateAPIView):

    name = _("Labels")
    model = Label
    serializer_class = LabelSerializer
@@ -10,13 +10,11 @@ from awx.main.models import InstanceLink, Instance


class MeshVisualizer(APIView):

    name = _("Mesh Visualizer")
    permission_classes = (IsSystemAdminOrAuditor,)
    swagger_topic = "System Configuration"

    def get(self, request, format=None):

        data = {
            'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
@@ -5,9 +5,11 @@
import logging

# Django
from django.conf import settings
from django.utils.translation import gettext_lazy as _

# Django REST Framework
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.exceptions import PermissionDenied

@@ -25,15 +27,19 @@ logger = logging.getLogger('awx.analytics')


class MetricsView(APIView):

    name = _('Metrics')
    swagger_topic = 'Metrics'

    renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]

    def initialize_request(self, request, *args, **kwargs):
        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
            self.permission_classes = (AllowAny,)
        return super(APIView, self).initialize_request(request, *args, **kwargs)

    def get(self, request):
        '''Show Metrics Details'''
        if request.user.is_superuser or request.user.is_system_auditor:
        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
            metrics_to_show = ''
            if not request.query_params.get('subsystemonly', "0") == "1":
                metrics_to_show += metrics().decode('UTF-8')
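With the hunk above, the metrics endpoint can be opened to unauthenticated scrapers via the ALLOW_METRICS_FOR_ANONYMOUS_USERS setting. A hypothetical probe (the host, and the flag being enabled, are assumptions):

# Hypothetical check of the metrics endpoint.
import requests

resp = requests.get("https://awx.example.com/api/v2/metrics/")
print(resp.status_code)  # 200 without credentials when the flag is on
print("\n".join(resp.text.splitlines()[:5]))  # Prometheus text exposition format

The subsystemonly=1 query parameter seen in get() skips the main metrics block and returns only subsystem metrics.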
@@ -16,7 +16,7 @@ from rest_framework import status

from awx.main.constants import ACTIVE_STATES
from awx.main.utils import get_object_or_400
from awx.main.models.ha import Instance, InstanceGroup
from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory

@@ -107,6 +107,11 @@ class InstanceGroupMembershipMixin(object):
        if inst_name in ig_obj.policy_instance_list:
            ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
            ig_obj.save(update_fields=['policy_instance_list'])

        # sometimes removing an instance has a non-obvious consequence
        # this is almost always true if policy_instance_percentage or _minimum is non-zero
        # after removing a single instance, the other memberships need to be re-balanced
        schedule_policy_task()
        return response

@@ -58,7 +58,6 @@ logger = logging.getLogger('awx.api.views.organization')


class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):

    model = Organization
    serializer_class = OrganizationSerializer

@@ -70,7 +69,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):


class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):

    model = Organization
    serializer_class = OrganizationSerializer

@@ -106,7 +104,6 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI


class OrganizationInventoriesList(SubListAPIView):

    model = Inventory
    serializer_class = InventorySerializer
    parent_model = Organization

@@ -114,7 +111,6 @@ class OrganizationInventoriesList(SubListAPIView):


class OrganizationUsersList(BaseUsersList):

    model = User
    serializer_class = UserSerializer
    parent_model = Organization

@@ -123,7 +119,6 @@ class OrganizationUsersList(BaseUsersList):


class OrganizationAdminsList(BaseUsersList):

    model = User
    serializer_class = UserSerializer
    parent_model = Organization

@@ -132,7 +127,6 @@ class OrganizationAdminsList(BaseUsersList):


class OrganizationProjectsList(SubListCreateAPIView):

    model = Project
    serializer_class = ProjectSerializer
    parent_model = Organization

@@ -140,7 +134,6 @@ class OrganizationProjectsList(SubListCreateAPIView):


class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):

    model = ExecutionEnvironment
    serializer_class = ExecutionEnvironmentSerializer
    parent_model = Organization

@@ -150,7 +143,6 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):


class OrganizationJobTemplatesList(SubListCreateAPIView):

    model = JobTemplate
    serializer_class = JobTemplateSerializer
    parent_model = Organization

@@ -158,7 +150,6 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):


class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):

    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateSerializer
    parent_model = Organization

@@ -166,7 +157,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):


class OrganizationTeamsList(SubListCreateAttachDetachAPIView):

    model = Team
    serializer_class = TeamSerializer
    parent_model = Organization

@@ -175,7 +165,6 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):


class OrganizationActivityStreamList(SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer
    parent_model = Organization

@@ -184,7 +173,6 @@ class OrganizationActivityStreamList(SubListAPIView):


class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization

@@ -193,34 +181,28 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):


class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization


class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):

    relationship = 'notification_templates_started'


class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):

    relationship = 'notification_templates_error'


class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):

    relationship = 'notification_templates_success'


class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):

    relationship = 'notification_templates_approvals'


class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):

    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Organization

@@ -228,7 +210,6 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):


class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):

    model = Credential
    serializer_class = CredentialSerializer
    parent_model = Organization

@@ -240,13 +221,11 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):


class OrganizationAccessList(ResourceAccessList):

    model = User  # needs to be User for AccessLists's
    parent_model = Organization


class OrganizationObjectRolesList(SubListAPIView):

    model = Role
    serializer_class = RoleSerializer
    parent_model = Organization
@@ -36,7 +36,6 @@ logger = logging.getLogger('awx.api.views.root')


class ApiRootView(APIView):

    permission_classes = (AllowAny,)
    name = _('REST API')
    versioning_class = None

@@ -59,7 +58,6 @@ class ApiRootView(APIView):


class ApiOAuthAuthorizationRootView(APIView):

    permission_classes = (AllowAny,)
    name = _("API OAuth 2 Authorization Root")
    versioning_class = None

@@ -74,7 +72,6 @@ class ApiOAuthAuthorizationRootView(APIView):


class ApiVersionRootView(APIView):

    permission_classes = (AllowAny,)
    swagger_topic = 'Versioning'

@@ -124,6 +121,7 @@ class ApiVersionRootView(APIView):
        data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
        data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
        data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
        data['bulk'] = reverse('api:bulk', request=request)
        return Response(data)

@@ -172,7 +170,6 @@ class ApiV2PingView(APIView):


class ApiV2SubscriptionView(APIView):

    permission_classes = (IsAuthenticated,)
    name = _('Subscriptions')
    swagger_topic = 'System Configuration'

@@ -212,7 +209,6 @@ class ApiV2SubscriptionView(APIView):


class ApiV2AttachView(APIView):

    permission_classes = (IsAuthenticated,)
    name = _('Attach Subscription')
    swagger_topic = 'System Configuration'

@@ -230,7 +226,6 @@ class ApiV2AttachView(APIView):
        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
        if pool_id and user and pw:

            data = request.data.copy()
            try:
                with set_environ(**settings.AWX_TASK_ENV):

@@ -258,7 +253,6 @@ class ApiV2AttachView(APIView):


class ApiV2ConfigView(APIView):

    permission_classes = (IsAuthenticated,)
    name = _('Configuration')
    swagger_topic = 'System Configuration'
@@ -8,7 +8,6 @@ from django.utils.translation import gettext_lazy as _


class ConfConfig(AppConfig):

    name = 'awx.conf'
    verbose_name = _('Configuration')

@@ -16,7 +15,6 @@ class ConfConfig(AppConfig):
        self.module.autodiscover()

        if not set(sys.argv) & {'migrate', 'check_migrations'}:

            from .settings import SettingsWrapper

            SettingsWrapper.initialize()

@@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields')
# Use DRF fields to convert/validate settings:
# - to_representation(obj) should convert a native Python object to a primitive
#   serializable type. This primitive type will be what is presented in the API
#   and stored in the JSON field in the datbase.
#   and stored in the JSON field in the database.
# - to_internal_value(data) should convert the primitive type back into the
#   appropriate Python type to be used in settings.

@@ -47,7 +47,6 @@ class IntegerField(IntegerField):


class StringListField(ListField):

    child = CharField()

    def to_representation(self, value):

@@ -57,7 +56,6 @@ class StringListField(ListField):


class StringListBooleanField(ListField):

    default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
    child = CharField()

@@ -96,7 +94,6 @@ class StringListBooleanField(ListField):


class StringListPathField(StringListField):

    default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}

    def to_internal_value(self, paths):

@@ -126,7 +123,6 @@ class StringListIsolatedPathField(StringListField):
    }

    def to_internal_value(self, paths):

        if isinstance(paths, (list, tuple)):
            for p in paths:
                if not isinstance(p, str):
@@ -8,7 +8,6 @@ import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

    operations = [

@@ -48,7 +48,6 @@ def revert_tower_settings(apps, schema_editor):


class Migration(migrations.Migration):

    dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]

    run_before = [('main', '0005_squashed_v310_v313_updates')]

@@ -7,7 +7,6 @@ import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [('conf', '0002_v310_copy_tower_settings')]

    operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]

@@ -5,7 +5,6 @@ from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [('conf', '0003_v310_JSONField_changes')]

    operations = [

@@ -15,7 +15,6 @@ def reverse_copy_session_settings(apps, schema_editor):


class Migration(migrations.Migration):

    dependencies = [('conf', '0004_v320_reencrypt')]

    operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]

@@ -8,7 +8,6 @@ from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [('conf', '0005_v330_rename_two_session_settings')]

    operations = [migrations.RunPython(fill_ldap_group_type_params)]

@@ -9,7 +9,6 @@ def copy_allowed_ips(apps, schema_editor):


class Migration(migrations.Migration):

    dependencies = [('conf', '0006_v331_ldap_group_type')]

    operations = [migrations.RunPython(copy_allowed_ips)]

@@ -14,7 +14,6 @@ def _noop(apps, schema_editor):


class Migration(migrations.Migration):

    dependencies = [('conf', '0007_v380_rename_more_settings')]

    operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]

@@ -10,7 +10,6 @@ def rename_proot_settings(apps, schema_editor):


class Migration(migrations.Migration):

    dependencies = [('conf', '0008_subscriptions')]

    operations = [migrations.RunPython(rename_proot_settings)]
@@ -1,7 +1,11 @@
import inspect

from django.conf import settings
from django.utils.timezone import now

import logging


logger = logging.getLogger('awx.conf.migrations')


def fill_ldap_group_type_params(apps, schema_editor):

@@ -15,7 +19,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
        entry = qs[0]
        group_type_params = entry.value
    else:
        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
        return  # for new installs we prefer to use the default value

    init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
    for k in list(group_type_params.keys()):

@@ -23,4 +27,5 @@ def fill_ldap_group_type_params(apps, schema_editor):
            del group_type_params[k]

    entry.value = group_type_params
    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
    entry.save()
@@ -10,7 +10,6 @@ __all__ = ['rename_setting']


def rename_setting(apps, schema_editor, old_key, new_key):

    old_setting = None
    Setting = apps.get_model('conf', 'Setting')
    if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):

@@ -17,7 +17,6 @@ __all__ = ['Setting']


class Setting(CreatedModifiedModel):

    key = models.CharField(max_length=255)
    value = JSONBlob(null=True)
    user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))

@@ -104,7 +104,6 @@ def filter_sensitive(registry, key, value):


class TransientSetting(object):

    __slots__ = ('pk', 'value')

    def __init__(self, pk, value):
awx/conf/tests/functional/test_migrations.py (new file, 25 lines)

@@ -0,0 +1,25 @@
import pytest

from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
from awx.conf.models import Setting

from django.apps import apps


@pytest.mark.django_db
def test_fill_group_type_params_no_op():
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 0


@pytest.mark.django_db
def test_keep_old_setting_with_default_value():
    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 1
    s = Setting.objects.first()
    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}


# NOTE: would be good to test the removal of attributes by migration
# but this requires fighting with the validator and is not done here
@@ -5,7 +5,6 @@ from awx.conf.fields import StringListBooleanField, StringListPathField, ListTup


class TestStringListBooleanField:

    FIELD_VALUES = [
        ("hello", "hello"),
        (("a", "b"), ["a", "b"]),

@@ -53,7 +52,6 @@ class TestStringListBooleanField:


class TestListTuplesField:

    FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]

    FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]

@@ -73,7 +71,6 @@ class TestListTuplesField:


class TestStringListPathField:

    FIELD_VALUES = [
        ((".", "..", "/"), [".", "..", "/"]),
        (("/home",), ["/home"]),

@@ -36,7 +36,6 @@ SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'nam


class SettingCategoryList(ListAPIView):

    model = Setting  # Not exactly, but needed for the view.
    serializer_class = SettingCategorySerializer
    filter_backends = []

@@ -58,7 +57,6 @@ class SettingCategoryList(ListAPIView):


class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

    model = Setting  # Not exactly, but needed for the view.
    serializer_class = SettingSingletonSerializer
    filter_backends = []

@@ -146,7 +144,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):


class SettingLoggingTest(GenericAPIView):

    name = _('Logging Connectivity Test')
    model = Setting
    serializer_class = SettingSingletonSerializer

@@ -183,7 +180,7 @@ class SettingLoggingTest(GenericAPIView):
        if not port:
            return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
        else:
            # if http/https by this point, domain is reacheable
            # if http/https by this point, domain is reachable
            return Response(status=status.HTTP_202_ACCEPTED)

        if protocol == 'udp':
@@ -1972,7 +1972,7 @@ msgid ""
"HTTP headers and meta keys to search to determine remote host name or IP. "
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
"behind a reverse proxy. See the \"Proxy Support\" section of the "
"Adminstrator guide for more details."
"Administrator guide for more details."
msgstr ""

#: awx/main/conf.py:85

@@ -2457,7 +2457,7 @@ msgid ""
msgstr ""

#: awx/main/conf.py:631
msgid "Maximum disk persistance for external log aggregation (in GB)"
msgid "Maximum disk persistence for external log aggregation (in GB)"
msgstr ""

#: awx/main/conf.py:633

@@ -2548,7 +2548,7 @@ msgid "Enable"
msgstr ""

#: awx/main/constants.py:27
msgid "Doas"
msgid "Does"
msgstr ""

#: awx/main/constants.py:28

@@ -4801,7 +4801,7 @@ msgstr ""

#: awx/main/models/workflow.py:251
msgid ""
"An identifier coresponding to the workflow job template node that this node "
"An identifier corresponding to the workflow job template node that this node "
"was created from."
msgstr ""

@@ -5521,7 +5521,7 @@ msgstr ""
#: awx/sso/conf.py:606
msgid ""
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
"single domain to authenticate, even if the user is logged in with multple "
"single domain to authenticate, even if the user is logged in with multiple "
"Google accounts. Refer to the documentation for more detail."
msgstr ""

@@ -5905,7 +5905,7 @@ msgstr ""

#: awx/sso/conf.py:1290
msgid ""
"Create a keypair to use as a service provider (SP) and include the "
"Create a key pair to use as a service provider (SP) and include the "
"certificate content here."
msgstr ""

@@ -5915,7 +5915,7 @@ msgstr ""

#: awx/sso/conf.py:1302
msgid ""
"Create a keypair to use as a service provider (SP) and include the private "
"Create a key pair to use as a service provider (SP) and include the private "
"key content here."
msgstr ""

@@ -1971,7 +1971,7 @@ msgid ""
"HTTP headers and meta keys to search to determine remote host name or IP. "
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
"behind a reverse proxy. See the \"Proxy Support\" section of the "
"Adminstrator guide for more details."
"Administrator guide for more details."
msgstr "Los encabezados HTTP y las llaves de activación para buscar y determinar el nombre de host remoto o IP. Añada elementos adicionales a esta lista, como \"HTTP_X_FORWARDED_FOR\", si está detrás de un proxy inverso. Consulte la sección \"Soporte de proxy\" de la guía del adminstrador para obtener más información."

#: awx/main/conf.py:85

@@ -4804,7 +4804,7 @@ msgstr "Indica que un trabajo no se creará cuando es sea True. La semántica de

#: awx/main/models/workflow.py:251
msgid ""
"An identifier coresponding to the workflow job template node that this node "
"An identifier corresponding to the workflow job template node that this node "
"was created from."
msgstr "Un identificador que corresponde al nodo de plantilla de tarea del flujo de trabajo a partir del cual se creó este nodo."

@@ -5526,7 +5526,7 @@ msgstr "Argumentos adicionales para Google OAuth2"
#: awx/sso/conf.py:606
msgid ""
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
"single domain to authenticate, even if the user is logged in with multple "
"single domain to authenticate, even if the user is logged in with multiple "
"Google accounts. Refer to the documentation for more detail."
msgstr "Argumentos adicionales para el inicio de sesión en Google OAuth2. Puede limitarlo para permitir la autenticación de un solo dominio, incluso si el usuario ha iniciado sesión con varias cuentas de Google. Consulte la documentación para obtener información detallada."

@@ -5910,7 +5910,7 @@ msgstr "Certificado público del proveedor de servicio SAML"

#: awx/sso/conf.py:1290
msgid ""
"Create a keypair to use as a service provider (SP) and include the "
"Create a key pair to use as a service provider (SP) and include the "
"certificate content here."
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido del certificado aquí."

@@ -5920,7 +5920,7 @@ msgstr "Clave privada del proveedor de servicio SAML"

#: awx/sso/conf.py:1302
msgid ""
"Create a keypair to use as a service provider (SP) and include the private "
"Create a key pair to use as a service provider (SP) and include the private "
"key content here."
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido de la clave privada aquí."

@@ -6237,4 +6237,5 @@ msgstr "%s se está actualizando."

#: awx/ui/urls.py:24
msgid "This page will refresh when complete."
msgstr "Esta página se actualizará cuando se complete."
msgstr "Esta página se actualizará cuando se complete."

@@ -721,7 +721,7 @@ msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DT

#: awx/api/serializers.py:4657
msgid ""
"DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ."
msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."
msgstr "DTSTART ne peut correspondre à une date-heure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."

#: awx/api/serializers.py:4659
msgid "Multiple DTSTART is not supported."

@@ -6239,4 +6239,5 @@ msgstr "%s est en cours de mise à niveau."

#: awx/ui/urls.py:24
msgid "This page will refresh when complete."
msgstr "Cette page sera rafraîchie une fois terminée."
msgstr "Cette page sera rafraîchie une fois terminée."

@@ -6237,4 +6237,5 @@ msgstr "Er wordt momenteel een upgrade van%s geïnstalleerd."

#: awx/ui/urls.py:24
msgid "This page will refresh when complete."
msgstr "Deze pagina wordt vernieuwd als hij klaar is."
msgstr "Deze pagina wordt vernieuwd als hij klaar is."

@@ -561,7 +561,6 @@ class NotificationAttachMixin(BaseAccess):


class InstanceAccess(BaseAccess):

    model = Instance
    prefetch_related = ('rampart_groups',)

@@ -579,7 +578,6 @@ class InstanceAccess(BaseAccess):
        return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, relationship, data=data)

    def can_add(self, data):

        return self.user.is_superuser

    def can_change(self, obj, data):

@@ -590,18 +588,39 @@ class InstanceAccess(BaseAccess):


class InstanceGroupAccess(BaseAccess):
    """
    I can see Instance Groups when I am:
    - a superuser(system administrator)
    - at least read_role on the instance group
    I can edit Instance Groups when I am:
    - a superuser
    - admin role on the Instance group
    I can add/delete Instance Groups:
    - a superuser(system administrator)
    I can use Instance Groups when I have:
    - use_role on the instance group
    """

    model = InstanceGroup
    prefetch_related = ('instances',)

    def filtered_queryset(self):
        return InstanceGroup.objects.filter(organization__in=Organization.accessible_pk_qs(self.user, 'admin_role')).distinct()
        return self.model.accessible_objects(self.user, 'read_role')

    @check_superuser
    def can_use(self, obj):
        return self.user in obj.use_role

    def can_add(self, data):
        return self.user.is_superuser

    @check_superuser
    def can_change(self, obj, data):
        return self.user.is_superuser
        return self.can_admin(obj)

    @check_superuser
    def can_admin(self, obj):
        return self.user in obj.admin_role

    def can_delete(self, obj):
        if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
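A hypothetical illustration of the reworked checks (usernames and object names are placeholder fixtures; assumes a configured AWX environment):

# Illustrative only: exercising the new role-based InstanceGroup access rules.
from django.contrib.auth.models import User

from awx.main.access import InstanceGroupAccess
from awx.main.models import InstanceGroup

user = User.objects.get(username='alice')       # assumed fixture
ig = InstanceGroup.objects.get(name='workers')  # assumed fixture

access = InstanceGroupAccess(user)
print(access.can_use(ig))           # True with use_role (or as a superuser)
print(access.can_change(ig, None))  # now delegates to admin_role instead of superuser-only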
@@ -848,7 +867,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
            return RoleAccess(self.user).can_attach(rel_role, sub_obj, 'members', *args, **kwargs)

        if relationship == "instance_groups":
            if self.user.is_superuser:
            if self.user in obj.admin_role and self.user in sub_obj.use_role:
                return True
            return False
        return super(OrganizationAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)

@@ -937,7 +956,7 @@ class InventoryAccess(BaseAccess):

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        if relationship == "instance_groups":
            if self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role:
            if self.user in sub_obj.use_role and self.user in obj.admin_role:
                return True
            return False
        return super(InventoryAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)

@@ -993,9 +1012,6 @@ class HostAccess(BaseAccess):
        if data and 'name' in data:
            self.check_license(add_host_name=data['name'])

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])

        # Checks for admin or change permission on inventory, controls whether
        # the user can edit variable data.
        return obj and self.user in obj.inventory.admin_role

@@ -1033,7 +1049,9 @@ class GroupAccess(BaseAccess):
        return Group.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))

    def can_add(self, data):
        if not data or 'inventory' not in data:
        if not data:  # So the browseable API will work
            return Inventory.accessible_objects(self.user, 'admin_role').exists()
        if 'inventory' not in data:
            return False
        # Checks for admin or change permission on inventory.
        return self.check_related('inventory', Inventory, data)

@@ -1675,11 +1693,12 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        return self.user.is_superuser or self.user in obj.admin_role

    @check_superuser
    # object here is the job template. sub_object here is what is being attached
    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
        if relationship == "instance_groups":
            if not obj.organization:
                return False
            return self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role
            return self.user in sub_obj.use_role and self.user in obj.admin_role
        return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)

    @check_superuser

@@ -1856,8 +1875,6 @@ class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):
    def _related_filtered_queryset(self, cls):
        if cls is Label:
            return LabelAccess(self.user).filtered_queryset()
        elif cls is InstanceGroup:
            return InstanceGroupAccess(self.user).filtered_queryset()
        else:
            return cls._accessible_pk_qs(cls, self.user, 'use_role')

@@ -1869,6 +1886,7 @@ class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):

    @check_superuser
    def can_add(self, data, template=None):
        # WARNING: duplicated with BulkJobLaunchSerializer, check when changing permission levels
        # This is a special case, we don't check related many-to-many elsewhere
        # launch RBAC checks use this
        if 'reference_obj' in data:

@@ -2001,7 +2019,16 @@ class WorkflowJobNodeAccess(BaseAccess):
    )

    def filtered_queryset(self):
        return self.model.objects.filter(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
        return self.model.objects.filter(
            Q(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
            | Q(workflow_job__organization__in=Organization.objects.filter(Q(admin_role__members=self.user)))
        )

    def can_read(self, obj):
        """Overriding this opens up detail view access for bulk jobs, where the workflow job has no associated workflow job template."""
        if obj.workflow_job.is_bulk_job and obj.workflow_job.created_by_id == self.user.id:
            return True
        return super().can_read(obj)

    @check_superuser
    def can_add(self, data):

@@ -2127,7 +2154,16 @@ class WorkflowJobAccess(BaseAccess):
    )

    def filtered_queryset(self):
        return WorkflowJob.objects.filter(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
        return WorkflowJob.objects.filter(
            Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
            | Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
        )

    def can_read(self, obj):
        """Overriding this opens up detail view access for bulk jobs, where the workflow job has no associated workflow job template."""
        if obj.is_bulk_job and obj.created_by_id == self.user.id:
            return True
        return super().can_read(obj)

    def can_add(self, data):
        # Old add-start system for launching jobs is being depreciated, and

@@ -2355,7 +2391,6 @@ class JobEventAccess(BaseAccess):


class UnpartitionedJobEventAccess(JobEventAccess):

    model = UnpartitionedJobEvent

@@ -2700,46 +2735,66 @@ class ActivityStreamAccess(BaseAccess):
        # 'job_template', 'job', 'project', 'project_update', 'workflow_job',
        # 'inventory_source', 'workflow_job_template'

        inventory_set = Inventory.accessible_objects(self.user, 'read_role')
        credential_set = Credential.accessible_objects(self.user, 'read_role')
        q = Q(user=self.user)
        inventory_set = Inventory.accessible_pk_qs(self.user, 'read_role')
        if inventory_set:
            q |= (
                Q(ad_hoc_command__inventory__in=inventory_set)
                | Q(inventory__in=inventory_set)
                | Q(host__inventory__in=inventory_set)
                | Q(group__inventory__in=inventory_set)
                | Q(inventory_source__inventory__in=inventory_set)
                | Q(inventory_update__inventory_source__inventory__in=inventory_set)
            )

        credential_set = Credential.accessible_pk_qs(self.user, 'read_role')
        if credential_set:
            q |= Q(credential__in=credential_set)

        auditing_orgs = (
            (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
            .distinct()
            .values_list('id', flat=True)
        )
        project_set = Project.accessible_objects(self.user, 'read_role')
        jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
        team_set = Team.accessible_objects(self.user, 'read_role')
        wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
        if auditing_orgs:
            q |= (
                Q(user__in=auditing_orgs.values('member_role__members'))
                | Q(organization__in=auditing_orgs)
                | Q(notification_template__organization__in=auditing_orgs)
                | Q(notification__notification_template__organization__in=auditing_orgs)
                | Q(label__organization__in=auditing_orgs)
                | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
            )

        return qs.filter(
            Q(ad_hoc_command__inventory__in=inventory_set)
            | Q(o_auth2_application__in=app_set)
            | Q(o_auth2_access_token__in=token_set)
            | Q(user__in=auditing_orgs.values('member_role__members'))
            | Q(user=self.user)
            | Q(organization__in=auditing_orgs)
            | Q(inventory__in=inventory_set)
            | Q(host__inventory__in=inventory_set)
            | Q(group__inventory__in=inventory_set)
            | Q(inventory_source__inventory__in=inventory_set)
            | Q(inventory_update__inventory_source__inventory__in=inventory_set)
            | Q(credential__in=credential_set)
            | Q(team__in=team_set)
            | Q(project__in=project_set)
            | Q(project_update__project__in=project_set)
            | Q(job_template__in=jt_set)
            | Q(job__job_template__in=jt_set)
            | Q(workflow_job_template__in=wfjt_set)
            | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
            | Q(workflow_job__workflow_job_template__in=wfjt_set)
            | Q(notification_template__organization__in=auditing_orgs)
            | Q(notification__notification_template__organization__in=auditing_orgs)
            | Q(label__organization__in=auditing_orgs)
            | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
        ).distinct()
        project_set = Project.accessible_pk_qs(self.user, 'read_role')
        if project_set:
            q |= Q(project__in=project_set) | Q(project_update__project__in=project_set)

        jt_set = JobTemplate.accessible_pk_qs(self.user, 'read_role')
        if jt_set:
            q |= Q(job_template__in=jt_set) | Q(job__job_template__in=jt_set)

        wfjt_set = WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role')
        if wfjt_set:
            q |= (
                Q(workflow_job_template__in=wfjt_set)
                | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
                | Q(workflow_job__workflow_job_template__in=wfjt_set)
            )

        team_set = Team.accessible_pk_qs(self.user, 'read_role')
        if team_set:
            q |= Q(team__in=team_set)

        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
        if app_set:
            q |= Q(o_auth2_application__in=app_set)

        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
        if token_set:
            q |= Q(o_auth2_access_token__in=token_set)

        return qs.filter(q).distinct()

    def can_add(self, data):
        return False
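The rewrite above builds the filter incrementally and ORs in each subquery only when the corresponding accessible set is non-empty, which keeps the generated SQL free of joins that can never match. A minimal sketch of the same pattern (placeholder model fields, not the real ActivityStream schema):

# Sketch of conditional Q composition; field names are placeholders.
from django.db.models import Q

def visible_entries(user, qs, inventory_ids, credential_ids):
    q = Q(user=user)  # everyone can see their own entries
    if inventory_ids:  # skip the join entirely when nothing is accessible
        q |= Q(inventory__in=inventory_ids)
    if credential_ids:
        q |= Q(credential__in=credential_ids)
    return qs.filter(q).distinct()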
@@ -1,8 +1,8 @@
import datetime
import asyncio
import logging
import aioredis
import redis
import redis.asyncio
import re

from prometheus_client import (

@@ -82,7 +82,7 @@ class BroadcastWebsocketStatsManager:

    async def run_loop(self):
        try:
            redis_conn = await aioredis.create_redis_pool(settings.BROKER_URL)
            redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
            while True:
                stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
                await redis_conn.set(self._redis_key, stats_data_str)

@@ -122,8 +122,8 @@ class BroadcastWebsocketStats:
            'Number of messages received, to be forwarded, by the broadcast websocket system',
            registry=self._registry,
        )
        self._messages_received = Gauge(
            f'awx_{self.remote_name}_messages_received',
        self._messages_received_current_conn = Gauge(
            f'awx_{self.remote_name}_messages_received_currrent_conn',
            'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
            registry=self._registry,
        )

@@ -144,13 +144,13 @@ class BroadcastWebsocketStats:

    def record_message_received(self):
        self._internal_messages_received_per_minute.record()
        self._messages_received.inc()
        self._messages_received_current_conn.inc()
        self._messages_received_total.inc()

    def record_connection_established(self):
        self._connection.state('connected')
        self._connection_start.set_to_current_time()
        self._messages_received.set(0)
        self._messages_received_current_conn.set(0)

    def record_connection_lost(self):
        self._connection.state('disconnected')
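The hunks above replace the unmaintained aioredis package with redis-py's built-in asyncio support. A standalone sketch of the replacement call (assumes a Redis server at the given URL and redis-py >= 4.2; note that from_url() itself is synchronous and simply returns a client):

# Standalone redis.asyncio sketch mirroring the migration above.
import asyncio

import redis.asyncio

async def main():
    conn = redis.asyncio.Redis.from_url("redis://localhost:6379")  # no await needed here
    await conn.set("awx_demo_key", "value")
    print(await conn.get("awx_demo_key"))
    await conn.aclose()  # redis-py >= 5; use close() on 4.x

asyncio.run(main())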
@@ -16,7 +16,7 @@ from awx.conf.license import get_license
from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook
from awx.main import models
from awx.main.analytics import register
from awx.main.scheduler.task_manager_models import TaskManagerInstances
from awx.main.scheduler.task_manager_models import TaskManagerModels

"""
This module is used to define metrics collected by awx.main.analytics.gather()

@@ -233,13 +233,14 @@ def projects_by_scm_type(since, **kwargs):
    return counts


@register('instance_info', '1.2', description=_('Cluster topology and capacity'))
@register('instance_info', '1.3', description=_('Cluster topology and capacity'))
def instance_info(since, include_hostnames=False, **kwargs):
    info = {}
    # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
    active_tasks = models.UnifiedJob.objects.filter(status__in=['running', 'waiting']).only('task_impact', 'controller_node', 'execution_node')
    tm_instances = TaskManagerInstances(active_tasks, instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
    for tm_instance in tm_instances.instances_by_hostname.values():
    tm_models = TaskManagerModels.init_with_consumed_capacity(
        instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled', 'node_type']
    )
    for tm_instance in tm_models.instances.instances_by_hostname.values():
        instance = tm_instance.obj
        instance_info = {
            'uuid': instance.uuid,

@@ -251,6 +252,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
            'enabled': instance.enabled,
            'consumed_capacity': tm_instance.consumed_capacity,
            'remaining_capacity': instance.capacity - tm_instance.consumed_capacity,
            'node_type': instance.node_type,
        }
        if include_hostnames is True:
            instance_info['hostname'] = instance.hostname
@@ -57,6 +57,7 @@ def metrics():
[
'hostname',
'instance_uuid',
'node_type',
],
registry=REGISTRY,
)
@@ -84,6 +85,7 @@ def metrics():
[
'hostname',
'instance_uuid',
'node_type',
],
registry=REGISTRY,
)
@@ -111,6 +113,7 @@ def metrics():
[
'hostname',
'instance_uuid',
'node_type',
],
registry=REGISTRY,
)
@@ -120,6 +123,7 @@ def metrics():
[
'hostname',
'instance_uuid',
'node_type',
],
registry=REGISTRY,
)
@@ -180,12 +184,13 @@ def metrics():
instance_data = instance_info(None, include_hostnames=True)
for uuid, info in instance_data.items():
hostname = info['hostname']
INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
node_type = info['node_type']
INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['capacity'])
INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['consumed_capacity'])
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['remaining_capacity'])
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).info(
{
'enabled': str(instance_data[uuid]['enabled']),
'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),

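These hunks add a node_type label to each instance-level gauge, which means every .labels() call must pass the new label as well. A self-contained sketch of that constraint with prometheus_client (the gauge name here is a stand-in, not AWX's registry):

from prometheus_client import CollectorRegistry, Gauge

REGISTRY = CollectorRegistry()
INSTANCE_CAPACITY = Gauge('instance_capacity', 'Capacity of each node', ['hostname', 'instance_uuid', 'node_type'], registry=REGISTRY)

# every .labels() call must now supply node_type to match the label list
# above; omitting it raises a ValueError for an incorrect label count
INSTANCE_CAPACITY.labels(hostname='node1', instance_uuid='abc-123', node_type='hybrid').set(100)
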
@@ -5,7 +5,9 @@ import logging

from django.conf import settings
from django.apps import apps

from awx.main.consumers import emit_channel_notification
from awx.main.utils import is_testing

root_key = 'awx_metrics'
logger = logging.getLogger('awx.main.analytics')
@@ -163,14 +165,10 @@ class Metrics:
Instance = apps.get_model('main', 'Instance')
if instance_name:
self.instance_name = instance_name
elif settings.IS_TESTING():
elif is_testing():
self.instance_name = "awx_testing"
else:
try:
self.instance_name = Instance.objects.me().hostname
except Exception as e:
self.instance_name = settings.CLUSTER_HOST_ID
logger.info(f'Instance {self.instance_name} seems to be unregistered, error: {e}')
self.instance_name = Instance.objects.my_hostname()

# metric name, help_text
METRICSLIST = [

@@ -3,6 +3,5 @@ from django.utils.translation import gettext_lazy as _


class MainConfig(AppConfig):

name = 'awx.main'
verbose_name = _('Main')

@@ -282,6 +282,16 @@ register(
placeholder={'HTTP_PROXY': 'myproxy.local:8080'},
)

register(
'AWX_RUNNER_KEEPALIVE_SECONDS',
field_class=fields.IntegerField,
label=_('K8S Ansible Runner Keep-Alive Message Interval'),
help_text=_('Only applies to jobs running in a Container Group. If not 0, send a message every so-many seconds to keep connection open.'),
category=_('Jobs'),
category_slug='jobs',
placeholder=240, # intended to be under common 5 minute idle timeout
)

register(
'GALAXY_TASK_ENV',
field_class=fields.KeyValueField,
@@ -569,7 +579,7 @@ register(
register(
'LOG_AGGREGATOR_LOGGERS',
field_class=fields.StringListField,
default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket'],
label=_('Loggers Sending Data to Log Aggregator Form'),
help_text=_(
'List of loggers that will send HTTP logs to the collector, these can '
@@ -577,7 +587,8 @@ register(
'awx - service logs\n'
'activity_stream - activity stream records\n'
'job_events - callback data from Ansible job events\n'
'system_tracking - facts gathered from scan jobs.'
'system_tracking - facts gathered from scan jobs\n'
'broadcast_websocket - errors pertaining to websockets broadcast metrics\n'
),
category=_('Logging'),
category_slug='logging',
@@ -764,6 +775,26 @@ register(
help_text=_('Indicates whether the instance is part of a kubernetes-based deployment.'),
)

register(
'BULK_JOB_MAX_LAUNCH',
field_class=fields.IntegerField,
default=100,
label=_('Max jobs to allow bulk jobs to launch'),
help_text=_('Max jobs to allow bulk jobs to launch'),
category=_('Bulk Actions'),
category_slug='bulk',
)

register(
'BULK_HOST_MAX_CREATE',
field_class=fields.IntegerField,
default=100,
label=_('Max number of hosts to allow to be created in a single bulk action'),
help_text=_('Max number of hosts to allow to be created in a single bulk action'),
category=_('Bulk Actions'),
category_slug='bulk',
)


def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

@@ -9,10 +9,16 @@ aim_inputs = {
'fields': [
{
'id': 'url',
'label': _('CyberArk AIM URL'),
'label': _('CyberArk CCP URL'),
'type': 'string',
'format': 'url',
},
{
'id': 'webservice_id',
'label': _('Web Service ID'),
'type': 'string',
'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
},
{
'id': 'app_id',
'label': _('Application ID'),
@@ -64,10 +70,13 @@ def aim_backend(**kwargs):
client_cert = kwargs.get('client_cert', None)
client_key = kwargs.get('client_key', None)
verify = kwargs['verify']
webservice_id = kwargs.get('webservice_id', '')
app_id = kwargs['app_id']
object_query = kwargs['object_query']
object_query_format = kwargs['object_query_format']
reason = kwargs.get('reason', None)
if webservice_id == '':
webservice_id = 'AIMWebService'

query_params = {
'AppId': app_id,
@@ -78,7 +87,7 @@ def aim_backend(**kwargs):
query_params['reason'] = reason

request_qs = '?' + urlencode(query_params, quote_via=quote)
request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))
request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))

with CertFiles(client_cert, client_key) as cert:
res = requests.get(
@@ -92,4 +101,4 @@ def aim_backend(**kwargs):
return res.json()['Content']


aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)

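The change above makes the first path segment of the CCP lookup URL configurable, defaulting to AIMWebService when the new webservice_id field is left blank. A runnable sketch of the resulting URL construction (the helper name and any query parameter other than AppId are illustrative):

from urllib.parse import urljoin, urlencode, quote

def build_ccp_url(url, webservice_id, app_id, object_query):
    # mirrors aim_backend above: a configurable first path segment instead of
    # the hard-coded 'AIMWebService'
    if webservice_id == '':
        webservice_id = 'AIMWebService'
    qs = '?' + urlencode({'AppId': app_id, 'Query': object_query}, quote_via=quote)
    return urljoin(url, '/'.join([webservice_id, 'api', 'Accounts'])) + qs

print(build_ccp_url('https://ccp.example.com', '', 'my-app', 'Safe=Test;Object=mycred'))
print(build_ccp_url('https://ccp.example.com', 'MyWebService', 'my-app', 'Safe=Test;Object=mycred'))
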
@@ -1,6 +1,5 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

import base64
from urllib.parse import urljoin, quote

from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
cacert = kwargs.get('cacert', None)

auth_kwargs = {
'headers': {'Content-Type': 'text/plain'},
'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
'data': api_key,
'allow_redirects': False,
}
@@ -69,9 +68,13 @@ def conjur_backend(**kwargs):
with CertFiles(cacert) as cert:
# https://www.conjur.org/api.html#authentication-authenticate-post
auth_kwargs['verify'] = cert
resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
try:
resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
resp.raise_for_status()
except requests.exceptions.HTTPError:
resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
raise_for_status(resp)
token = base64.b64encode(resp.content).decode('utf-8')
token = resp.content.decode('utf-8')

lookup_kwargs = {
'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -80,14 +83,21 @@ def conjur_backend(**kwargs):

# https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
path_conjurcloud = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
if version:
path = '?'.join([path, version])
ver = "version={}".format(version)
path = '?'.join([path, ver])
path_conjurcloud = '?'.join([path_conjurcloud, ver])

with CertFiles(cacert) as cert:
lookup_kwargs['verify'] = cert
resp = requests.get(path, timeout=30, **lookup_kwargs)
try:
resp = requests.get(path, timeout=30, **lookup_kwargs)
resp.raise_for_status()
except requests.exceptions.HTTPError:
resp = requests.get(path_conjurcloud, timeout=30, **lookup_kwargs)
raise_for_status(resp)
return resp.text


conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)

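The rewritten backend first tries the self-hosted Conjur path and, on an HTTP error, retries against the Conjur Cloud variant with an api/ prefix. That try-then-fallback shape, extracted into a self-contained helper (the function name is illustrative):

import requests

def get_with_fallback(primary_url, fallback_url, timeout=30, **kwargs):
    # try the self-hosted endpoint first; on an HTTP error status, retry the
    # cloud-style endpoint and let that response's status decide the outcome
    try:
        resp = requests.get(primary_url, timeout=timeout, **kwargs)
        resp.raise_for_status()
    except requests.exceptions.HTTPError:
        resp = requests.get(fallback_url, timeout=timeout, **kwargs)
        resp.raise_for_status()
    return resp
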
@@ -1,6 +1,7 @@
import copy
import os
import pathlib
import time
from urllib.parse import urljoin

from .plugin import CredentialPlugin, CertFiles, raise_for_status
@@ -247,7 +248,15 @@ def kv_backend(**kwargs):
request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')
with CertFiles(cacert) as cert:
request_kwargs['verify'] = cert
response = sess.get(request_url, **request_kwargs)
request_retries = 0
while request_retries < 5:
response = sess.get(request_url, **request_kwargs)
# https://developer.hashicorp.com/vault/docs/enterprise/consistency
if response.status_code == 412:
request_retries += 1
time.sleep(1)
else:
break
raise_for_status(response)

json = response.json()
@@ -289,8 +298,15 @@ def ssh_backend(**kwargs):

with CertFiles(cacert) as cert:
request_kwargs['verify'] = cert
resp = sess.post(request_url, **request_kwargs)

request_retries = 0
while request_retries < 5:
resp = sess.post(request_url, **request_kwargs)
# https://developer.hashicorp.com/vault/docs/enterprise/consistency
if resp.status_code == 412:
request_retries += 1
time.sleep(1)
else:
break
raise_for_status(resp)
return resp.json()['data']['signed_key']

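Both Vault backends gain the same retry loop: HTTP 412 signals that an eventually-consistent Vault Enterprise node has not caught up yet, so the request is retried up to five times with a one-second pause. The pattern as a standalone helper (a sketch; the names are not AWX's):

import time
import requests

def request_with_consistency_retries(sess, method, url, max_retries=5, **kwargs):
    # Vault Enterprise returns 412 when a node lags behind the cluster state;
    # see https://developer.hashicorp.com/vault/docs/enterprise/consistency
    attempts = 0
    while attempts < max_retries:
        response = sess.request(method, url, **kwargs)
        if response.status_code == 412:
            attempts += 1
            time.sleep(1)
        else:
            break
    response.raise_for_status()
    return response

sess = requests.Session()
# usage: response = request_with_consistency_retries(sess, 'GET', 'https://vault.example.com/v1/secret/data/app')
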
@@ -49,7 +49,10 @@ def tss_backend(**kwargs):
secret_dict = secret_server.get_secret(kwargs['secret_id'])
secret = ServerSecret(**secret_dict)

return secret.fields[kwargs['secret_field']].value
if isinstance(secret.fields[kwargs['secret_field']].value, str) == False:
return secret.fields[kwargs['secret_field']].value.text
else:
return secret.fields[kwargs['secret_field']].value


tss_plugin = CredentialPlugin(

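The new branch handles Thycotic/Delinea secret fields whose value is not a plain string (for example, file attachments exposed as response objects carrying a .text attribute). The same unwrapping, written a bit more idiomatically with `not isinstance(...)` rather than `== False`; the helper name is illustrative:

def unwrap_field_value(value):
    # non-string field values expose their content via a .text attribute
    if not isinstance(value, str):
        return value.text
    return value
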
@@ -3,6 +3,7 @@ import uuid
import json

from django.conf import settings
from django.db import connection
import redis

from awx.main.dispatch import get_local_queuename
@@ -13,7 +14,6 @@ logger = logging.getLogger('awx.main.dispatch')


class Control(object):

services = ('dispatcher', 'callback_receiver')
result = None

@@ -49,7 +49,10 @@ class Control(object):
reply_queue = Control.generate_reply_queue_name()
self.result = None

with pg_bus_conn(new_connection=True) as conn:
if not connection.get_autocommit():
raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')

with pg_bus_conn() as conn:
conn.listen(reply_queue)
send_data = {'control': command, 'reply_to': reply_queue}
if extra_data:

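Control-with-reply now reuses the calling connection and therefore insists on autocommit: PostgreSQL only delivers NOTIFY payloads once the sending transaction commits, so listening inside an open transaction would silently stall. A minimal psycopg2 LISTEN loop showing why autocommit matters (the DSN and channel name are placeholders):

import select
import psycopg2
import psycopg2.extensions

conn = psycopg2.connect('dbname=awx')  # placeholder DSN
# without autocommit, our own NOTIFYs would be held until COMMIT and incoming
# notifications would not be seen promptly
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute('LISTEN reply_queue_example;')
while True:
    # block up to 5 seconds waiting for traffic, then poll for notifications
    if select.select([conn], [], [], 5) != ([], [], []):
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print('got payload:', notify.payload)
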
@@ -192,7 +192,6 @@ class PoolWorker(object):


class StatefulPoolWorker(PoolWorker):

track_managed_tasks = True


@@ -387,6 +386,8 @@ class AutoscalePool(WorkerPool):
reaper.reap_job(j, 'failed')
except Exception:
logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
else:
logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
orphaned.extend(w.orphaned_tasks)
self.workers.remove(w)
elif w.idle and len(self.workers) > self.min_workers:
@@ -450,9 +451,6 @@ class AutoscalePool(WorkerPool):
try:
if isinstance(body, dict) and body.get('bind_kwargs'):
self.add_bind_kwargs(body)
# when the cluster heartbeat occurs, clean up internally
if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
self.cleanup()
if self.should_grow:
self.up()
# we don't care about "preferred queue" round robin distribution, just
@@ -467,7 +465,7 @@ class AutoscalePool(WorkerPool):
task_name = 'unknown'
if isinstance(body, dict):
task_name = body.get('task')
logger.warn(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
return super(AutoscalePool, self).write(preferred_queue, body)
except Exception:
for conn in connections.all():

@@ -1,14 +1,13 @@
import inspect
import logging
import sys
import json
import time
from uuid import uuid4

from django.conf import settings
from django_guid import get_guid

from . import pg_bus_conn
from awx.main.utils import is_testing

logger = logging.getLogger('awx.main.dispatch')

@@ -67,7 +66,6 @@ class task:
bind_kwargs = self.bind_kwargs

class PublisherMixin(object):

queue = None

@classmethod
@@ -93,7 +91,7 @@ class task:
obj.update(**kw)
if callable(queue):
queue = queue()
if not settings.IS_TESTING(sys.argv):
if not is_testing():
with pg_bus_conn() as conn:
conn.notify(queue, json.dumps(obj))
return (obj, queue)

@@ -16,12 +16,7 @@ def startup_reaping():
If this particular instance is starting, then we know that any running jobs are invalid
so we will reap those jobs as a special action here
"""
try:
me = Instance.objects.me()
except RuntimeError as e:
logger.warning(f'Local instance is not registered, not running startup reaper: {e}')
return
jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
job_ids = []
for j in jobs:
job_ids.append(j.id)
@@ -62,36 +57,30 @@ def reap_waiting(instance=None, status='failed', job_explanation=None, grace_per
if grace_period is None:
grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT

me = instance
if me is None:
try:
me = Instance.objects.me()
except RuntimeError as e:
logger.warning(f'Local instance is not registered, not running reaper: {e}')
return
if instance is None:
hostname = Instance.objects.my_hostname()
else:
hostname = instance.hostname
if ref_time is None:
ref_time = tz_now()
jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=me.hostname)
jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
if excluded_uuids:
jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
for j in jobs:
reap_job(j, status, job_explanation=job_explanation)


def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None):
def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None, ref_time=None):
"""
Reap all jobs in running for this instance.
"""
me = instance
if me is None:
try:
me = Instance.objects.me()
except RuntimeError as e:
logger.warning(f'Local instance is not registered, not running reaper: {e}')
return
if instance is None:
hostname = Instance.objects.my_hostname()
else:
hostname = instance.hostname
workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
jobs = UnifiedJob.objects.filter(
Q(status='running') & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
Q(status='running', modified__lte=ref_time) & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
)
if excluded_uuids:
jobs = jobs.exclude(celery_task_id__in=excluded_uuids)

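reap() now takes a ref_time and only considers running jobs whose modified time is at or before it, which avoids reaping jobs that changed state after the reference snapshot was taken. A toy predicate with the same shape as the queryset (plain dicts standing in for UnifiedJob rows):

from datetime import datetime, timedelta, timezone

def should_reap(job, hostname, ref_time):
    # running, tied to this node as controller or executor, and not modified
    # since ref_time (workflow jobs are excluded in the real queryset)
    return (
        job['status'] == 'running'
        and hostname in (job['execution_node'], job['controller_node'])
        and job['modified'] <= ref_time
    )

now = datetime.now(timezone.utc)
job = {'status': 'running', 'execution_node': 'node1', 'controller_node': 'node2', 'modified': now - timedelta(minutes=10)}
print(should_reap(job, 'node1', now - timedelta(minutes=5)))  # True: stale enough to reap
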
@@ -40,7 +40,6 @@ class WorkerSignalHandler:


class AWXConsumerBase(object):

last_stats = time.time()

def __init__(self, name, worker, queues=[], pool=None):
@@ -114,7 +113,6 @@ class AWXConsumerBase(object):
queue = 0
self.pool.write(queue, body)
self.total_messages += 1
self.record_statistics()

@log_excess_runtime(logger)
def record_statistics(self):
@@ -156,6 +154,16 @@ class AWXConsumerPG(AWXConsumerBase):
# if no successful loops have run since startup, then we should fail right away
self.pg_is_down = True # set so that we fail if we get database errors on startup
self.pg_down_time = time.time() - self.pg_max_wait # allow no grace period
self.last_cleanup = time.time()

def run_periodic_tasks(self):
self.record_statistics() # maintains time buffer in method

if time.time() - self.last_cleanup > 60: # same as cluster_node_heartbeat
# NOTE: if we run out of database connections, it is important to still run cleanup
# so that we scale down workers and free up connections
self.pool.cleanup()
self.last_cleanup = time.time()

def run(self, *args, **kwargs):
super(AWXConsumerPG, self).run(*args, **kwargs)
@@ -171,8 +179,10 @@ class AWXConsumerPG(AWXConsumerBase):
if init is False:
self.worker.on_start()
init = True
for e in conn.events():
self.process_task(json.loads(e.payload))
for e in conn.events(yield_timeouts=True):
if e is not None:
self.process_task(json.loads(e.payload))
self.run_periodic_tasks()
self.pg_is_down = False
if self.should_stop:
return
@@ -229,6 +239,8 @@ class BaseWorker(object):
# so we can establish a new connection
conn.close_if_unusable_or_obsolete()
self.perform_work(body, *args)
except Exception:
logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
finally:
if 'uuid' in body:
uuid = body['uuid']

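The consumer loop now asks conn.events() to yield None on timeouts, so periodic housekeeping runs even when no messages arrive. A generic stand-in for that pattern using a plain queue (everything here is illustrative, not the pg_bus_conn API):

import queue
import time

def events(q, yield_timeouts=True, timeout=5):
    # block up to `timeout` seconds per iteration; yield None when idle so the
    # caller gets a chance to run periodic tasks
    while True:
        try:
            yield q.get(timeout=timeout)
        except queue.Empty:
            if yield_timeouts:
                yield None

q = queue.Queue()
last_cleanup = time.time()
for e in events(q):
    if e is not None:
        print('process', e)
    if time.time() - last_cleanup > 60:  # same cadence as cluster_node_heartbeat
        print('periodic cleanup')
        last_cleanup = time.time()
    break  # demo only: stop after the first (timed-out) iteration
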
@@ -3,14 +3,12 @@ import logging
import os
import signal
import time
import traceback
import datetime

from django.conf import settings
from django.utils.functional import cached_property
from django.utils.timezone import now as tz_now
from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
from django.db.utils import InterfaceError, InternalError
from django.db import transaction, connection as django_connection
from django_guid import set_guid

import psutil
@@ -64,6 +62,7 @@ class CallbackBrokerWorker(BaseWorker):
"""

MAX_RETRIES = 2
INDIVIDUAL_EVENT_RETRIES = 3
last_stats = time.time()
last_flush = time.time()
total = 0
@@ -155,6 +154,8 @@ class CallbackBrokerWorker(BaseWorker):
metrics_events_missing_created = 0
metrics_total_job_event_processing_seconds = datetime.timedelta(seconds=0)
for cls, events in self.buff.items():
if not events:
continue
logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
for e in events:
e.modified = now # this can be set before created because now is set above on line 149
@@ -164,38 +165,48 @@ class CallbackBrokerWorker(BaseWorker):
else: # only calculate the seconds if the created time already has been set
metrics_total_job_event_processing_seconds += e.modified - e.created
metrics_duration_to_save = time.perf_counter()
saved_events = []
try:
cls.objects.bulk_create(events)
metrics_bulk_events_saved += len(events)
saved_events = events
self.buff[cls] = []
except Exception as exc:
logger.warning(f'Error in events bulk_create, will try individually up to 5 errors, error {str(exc)}')
# If the database is flaking, let ensure_connection throw a general exception
# will be caught by the outer loop, which goes into a proper sleep and retry loop
django_connection.ensure_connection()
logger.warning(f'Error in events bulk_create, will try individually, error: {str(exc)}')
# if an exception occurs, we should re-attempt to save the
# events one-by-one, because something in the list is
# broken/stale
consecutive_errors = 0
events_saved = 0
metrics_events_batch_save_errors += 1
for e in events:
for e in events.copy():
try:
e.save()
events_saved += 1
consecutive_errors = 0
metrics_singular_events_saved += 1
events.remove(e)
saved_events.append(e) # Importantly, remove successfully saved events from the buffer
except Exception as exc_indv:
consecutive_errors += 1
logger.info(f'Database Error Saving individual Job Event, error {str(exc_indv)}')
if consecutive_errors >= 5:
raise
metrics_singular_events_saved += events_saved
if events_saved == 0:
raise
retry_count = getattr(e, '_retry_count', 0) + 1
e._retry_count = retry_count

# special sanitization logic for postgres treatment of NUL 0x00 char
if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
e.stdout = e.stdout.replace("\x00", "")

if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
events.remove(e)
else:
logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}')

metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
for e in events:
for e in saved_events:
if not getattr(e, '_skip_websocket_message', False):
metrics_events_broadcast += 1
emit_event_detail(e)
if getattr(e, '_notification_trigger_event', False):
job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
self.buff = {}
self.last_flush = time.time()
# only update metrics if we saved events
if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
@@ -267,20 +278,16 @@ class CallbackBrokerWorker(BaseWorker):
try:
self.flush(force=flush)
break
except (OperationalError, InterfaceError, InternalError) as exc:
except Exception as exc:
# Aside from bugs, exceptions here are assumed to be due to database flake
if retries >= self.MAX_RETRIES:
logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
self.buff = {}
return
delay = 60 * retries
logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}')
django_connection.close()
time.sleep(delay)
retries += 1
except DatabaseError:
logger.exception('Database Error Flushing Job Events')
django_connection.close()
break
except Exception as exc:
tb = traceback.format_exc()
logger.error('Callback Task Processor Raised Exception: %r', exc)
logger.error('Detail: {}'.format(tb))
except Exception:
logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}')

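The flush path above gives each event its own retry counter and drops an event from the buffer once it exceeds INDIVIDUAL_EVENT_RETRIES, so a single poison event cannot wedge the whole batch. A simplified standalone version of that bookkeeping (dicts stand in for event model instances, and save is any callable that may raise):

INDIVIDUAL_EVENT_RETRIES = 3

def save_individually(events, save):
    saved = []
    for e in events.copy():
        try:
            save(e)
            events.remove(e)  # successfully saved events leave the buffer
            saved.append(e)
        except Exception:
            e['_retry_count'] = e.get('_retry_count', 0) + 1
            if e['_retry_count'] >= INDIVIDUAL_EVENT_RETRIES:
                events.remove(e)  # give up on this event only; keep the rest
    return saved

buff = [{'id': 1}, {'id': 2}]
print(save_individually(buff, lambda e: None), buff)  # all saved, buffer drained
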
@@ -232,7 +232,6 @@ class ImplicitRoleField(models.ForeignKey):
field_names = [field_names]

for field_name in field_names:

if field_name.startswith('singleton:'):
continue

@@ -244,7 +243,6 @@ class ImplicitRoleField(models.ForeignKey):
field = getattr(cls, field_name, None)

if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:

if '.' in field_attr:
raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')

@@ -629,7 +627,6 @@ class CredentialInputField(JSONSchemaField):
# `ssh_key_unlock` requirements are very specific and can't be
# represented without complicated JSON schema
if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:

# in order to properly test the necessity of `ssh_key_unlock`, we
# need to know the real value of `ssh_key_data`; for a payload like:
# {
@@ -791,7 +788,8 @@ class CredentialTypeInjectorField(JSONSchemaField):
'type': 'object',
'patternProperties': {
# http://docs.ansible.com/ansible/playbooks_variables.html#what-makes-a-valid-variable-name
'^[a-zA-Z_]+[a-zA-Z0-9_]*$': {'type': 'string'},
# plus, add ability to template
r'^[a-zA-Z_\{\}]+[a-zA-Z0-9_\{\}]*$': {"anyOf": [{'type': 'string'}, {'type': 'array'}, {'$ref': '#/properties/extra_vars'}]}
},
'additionalProperties': False,
},
@@ -858,27 +856,44 @@ class CredentialTypeInjectorField(JSONSchemaField):
template_name = template_name.split('.')[1]
setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE_FILENAME')

def validate_template_string(type_, key, tmpl):
try:
sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
except UndefinedError as e:
raise django_exceptions.ValidationError(
_('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
code='invalid',
params={'value': value},
)
except SecurityError as e:
raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
except TemplateSyntaxError as e:
raise django_exceptions.ValidationError(
_('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
code='invalid',
params={'value': value},
)

def validate_extra_vars(key, node):
if isinstance(node, dict):
for k, v in node.items():
validate_template_string("extra_vars", 'a key' if key is None else key, k)
validate_extra_vars(k if key is None else "{key}.{k}".format(key=key, k=k), v)
elif isinstance(node, list):
for i, x in enumerate(node):
validate_extra_vars("{key}[{i}]".format(key=key, i=i), x)
else:
validate_template_string("extra_vars", key, node)

for type_, injector in value.items():
if type_ == 'env':
for key in injector.keys():
self.validate_env_var_allowed(key)
for key, tmpl in injector.items():
try:
sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
except UndefinedError as e:
raise django_exceptions.ValidationError(
_('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
code='invalid',
params={'value': value},
)
except SecurityError as e:
raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
except TemplateSyntaxError as e:
raise django_exceptions.ValidationError(
_('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
code='invalid',
params={'value': value},
)
if type_ == 'extra_vars':
validate_extra_vars(None, injector)
else:
for key, tmpl in injector.items():
validate_template_string(type_, key, tmpl)


class AskForField(models.BooleanField):

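The refactor above pulls the Jinja rendering and its error handling into validate_template_string, then walks nested extra_vars structures recursively, validating dict keys, list items, and leaf values alike. A self-contained sketch of that recursion against a toy namespace (jinja2 is the only dependency; the namespace is illustrative):

from jinja2 import StrictUndefined
from jinja2.exceptions import UndefinedError
from jinja2.sandbox import ImmutableSandboxedEnvironment

valid_namespace = {'username': 'admin'}  # stand-in for the real credential namespace

def validate_template_string(key, tmpl):
    # raises UndefinedError for unknown fields; the sandbox also rejects
    # unsafe constructs with SecurityError
    ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)

def validate_extra_vars(key, node):
    # walk dicts and lists; both keys and leaf values may be templates
    if isinstance(node, dict):
        for k, v in node.items():
            validate_template_string('a key' if key is None else key, k)
            validate_extra_vars(k if key is None else f'{key}.{k}', v)
    elif isinstance(node, list):
        for i, x in enumerate(node):
            validate_extra_vars(f'{key}[{i}]', x)
    else:
        validate_template_string(key, node)

validate_extra_vars(None, {'auth': {'user': '{{ username }}'}})  # passes
try:
    validate_extra_vars(None, {'auth': '{{ missing }}'})
except UndefinedError as e:
    print('rejected:', e)
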
@@ -25,7 +25,7 @@ class Command(BaseCommand):
with connection.cursor() as cursor:
cursor.execute(
f'''
SELECT
SELECT
b.id, b.job_id, b.host_name, b.created - a.created delta,
b.task task,
b.event_data::json->'task_action' task_action,

@@ -9,7 +9,6 @@ class Command(BaseCommand):
"""Checks connection to the database, and prints out connection info if not connected"""

def handle(self, *args, **options):

with connection.cursor() as cursor:
cursor.execute("SELECT version()")
version = str(cursor.fetchone()[0])

@@ -82,7 +82,6 @@ class DeleteMeta:
part_drop = {}

for pk, status, created in self.jobs_qs:

part_key = partition_table_name(self.job_class, created)
if status in ['pending', 'waiting', 'running']:
part_drop[part_key] = False

@@ -17,7 +17,6 @@ class Command(BaseCommand):

def handle(self, *args, **options):
if not options['user']:

raise CommandError('Username not supplied. Usage: awx-manage create_oauth2_token --user=username.')
try:
user = User.objects.get(username=options['user'])