mirror of https://github.com/ansible/awx.git
synced 2026-02-08 21:14:47 -03:30

Compare commits: 21.4.0 ... constructe (843 commits)
| Author | SHA1 | Date |
|---|---|---|
| — | e985b98d61 … 29702400f1 (843 commits; author, date, and message cells are empty in this capture, full SHA list omitted) | — |
@@ -1,3 +1,2 @@
-awx/ui/node_modules
 Dockerfile
 .git

46  .github/ISSUE_TEMPLATE/feature_request.yml  (vendored)

@@ -20,6 +20,19 @@ body:
         - label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
           required: true
 
+  - type: dropdown
+    id: feature-type
+    attributes:
+      label: Feature type
+      description: >-
+        What kind of feature is this?
+      multiple: false
+      options:
+        - "New Feature"
+        - "Enhancement to Existing Feature"
+    validations:
+      required: true
+
   - type: textarea
     id: summary
     attributes:
@@ -40,3 +53,36 @@ body:
         - label: CLI
         - label: Other
 
+  - type: textarea
+    id: steps-to-reproduce
+    attributes:
+      label: Steps to reproduce
+      description: >-
+        Describe the necessary steps to understand the scenario of the requested enhancement.
+        Include all the steps that will help the developer and QE team understand what you are requesting.
+    validations:
+      required: true
+
+  - type: textarea
+    id: current-results
+    attributes:
+      label: Current results
+      description: What is currently happening on the scenario?
+    validations:
+      required: true
+
+  - type: textarea
+    id: sugested-results
+    attributes:
+      label: Sugested feature result
+      description: What is the result this new feature will bring?
+    validations:
+      required: true
+
+  - type: textarea
+    id: additional-information
+    attributes:
+      label: Additional information
+      description: Please provide any other information you think is relevant that could help us understand your feature request.
+    validations:
+      required: false

17  .github/triage_replies.md  (vendored)

@@ -53,6 +53,16 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!
 
 
+### Red Hat Support Team
+- Hi! \
+\
+It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
+\
+Here is the link to do so: \
+\
+https://access.redhat.com/support \
+\
+Thank you for your submission and for supporting AWX!
 
 
 ## Common
@@ -96,6 +106,13 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 ### Oracle AWX
 We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support.
 
+### Community Resolved
+Hi,
+
+We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise.
+
+<name of community member who helped> thanks so much for taking the time to write a thoughtful and helpful response to this issue!
+
 ### AWX Release
 Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb
 

82  .github/workflows/ci.yml  (vendored)

@@ -1,7 +1,10 @@
 ---
 name: CI
 env:
-  BRANCH: ${{ github.base_ref || 'devel' }}
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
+  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
 jobs:
@@ -17,85 +20,33 @@ jobs:
         tests:
           - name: api-test
             command: /start_tests.sh
-            label: Run API Tests
           - name: api-lint
             command: /var/lib/awx/venv/awx/bin/tox -e linters
-            label: Run API Linters
           - name: api-swagger
             command: /start_tests.sh swagger
-            label: Generate API Reference
           - name: awx-collection
             command: /start_tests.sh test_collection_all
-            label: Run Collection Tests
           - name: api-schema
-            label: Check API Schema
             command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
           - name: ui-lint
-            label: Run UI Linters
             command: make ui-lint
           - name: ui-test-screens
-            label: Run UI Screens Tests
             command: make ui-test-screens
           - name: ui-test-general
-            label: Run UI General Tests
             command: make ui-test-general
     steps:
       - uses: actions/checkout@v2
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.py_version }}
-
-      - name: Log in to registry
-        run: |
-          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
-
-      - name: Build image
-        run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
-
-      - name: ${{ matrix.texts.label }}
-        run: |
-          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
-            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
+      - name: Run check ${{ matrix.tests.name }}
+        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make github_ci_runner
   dev-env:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.py_version }}
-
-      - name: Log in to registry
-        run: |
-          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
-
-      - name: Build image
-        run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
-
       - name: Run smoke test
-        run: |
-          export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }}
-          export COMPOSE_TAG=${{ env.BRANCH }}
-          ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v
+        run: make github_ci_setup && ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
 
   awx-operator:
     runs-on: ubuntu-latest
@@ -144,3 +95,22 @@ jobs:
     env:
       AWX_TEST_IMAGE: awx
       AWX_TEST_VERSION: ci
+
+  collection-sanity:
+    name: awx_collection sanity
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v2
+
+      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
+      - name: Upgrade ansible-core
+        run: python3 -m pip install --upgrade ansible-core
+
+      - name: Run sanity tests
+        run: make test_collection_sanity
+        env:
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1

2  .github/workflows/devel_images.yml  (vendored)

@@ -1,5 +1,7 @@
 ---
 name: Build/Push Development Images
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   push:
     branches:

7  .github/workflows/e2e_test.yml  (vendored)

@@ -1,9 +1,12 @@
 ---
 name: E2E Tests
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   pull_request_target:
     types: [labeled]
 jobs:
   e2e-test:
     if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
     runs-on: ubuntu-latest
@@ -104,5 +107,3 @@ jobs:
         with:
           name: AWX-logs-${{ matrix.job }}
           path: make-docker-compose-output.log
-
-

26  .github/workflows/feature_branch_deletion.yml  (vendored, new file)

@@ -0,0 +1,26 @@
+---
+name: Feature branch deletion cleanup
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+on:
+  delete:
+    branches:
+      - feature_**
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Delete API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
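The new workflow above shells out to Ansible's `aws_s3` module for the delete. A minimal Python sketch of the same cleanup using boto3 directly — the bucket name and key layout come from the workflow; the credentials-from-env wiring and branch parsing are assumptions for illustration:

```python
import os

import boto3  # the workflow installs this on the fly via pip

# Assumption: derive the branch the same way the workflow does,
# i.e. the last path component of GITHUB_REF (e.g. "feature_foo").
branch = os.environ["GITHUB_REF"].rsplit("/", 1)[-1]

s3 = boto3.client(
    "s3",
    aws_access_key_id=os.environ["AWS_ACCESS_KEY"],
    aws_secret_access_key=os.environ["AWS_SECRET_KEY"],
    region_name=os.environ.get("AWS_REGION", "us-east-1"),
)

# Mirrors mode=delete against the awx-public-ci-files bucket.
s3.delete_object(Bucket="awx-public-ci-files", Key=f"{branch}/schema.json")
```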

31  .github/workflows/label_issue.yml  (vendored)

@@ -19,3 +19,34 @@ jobs:
           not-before: 2021-12-07T07:00:00Z
           configuration-path: .github/issue_labeler.yml
           enable-versioned-regex: 0
+
+  community:
+    runs-on: ubuntu-latest
+    name: Label Issue - Community
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
+      - name: Install python requests
+        run: pip install requests
+      - name: Check if user is a member of Ansible org
+        uses: jannekem/run-python-script-action@v1
+        id: check_user
+        with:
+          script: |
+            import requests
+            headers = {'Accept': 'application/vnd.github+json', 'Authorization': 'token ${{ secrets.GITHUB_TOKEN }}'}
+            response = requests.get('${{ fromJson(toJson(github.event.issue.user.url)) }}/orgs?per_page=100', headers=headers)
+            is_member = False
+            for org in response.json():
+                if org['login'] == 'ansible':
+                    is_member = True
+            if is_member:
+                print("User is member")
+            else:
+                print("User is community")
+      - name: Add community label if not a member
+        if: contains(steps.check_user.outputs.stdout, 'community')
+        uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90
+        with:
+          add-labels: "community"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
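Extracted from the inline step above, the check is a single query of the author's public orgs (capped at `per_page=100`). A standalone sketch — the user URL and token are placeholders, not values from this repo:

```python
import requests


def is_ansible_org_member(user_url: str, token: str) -> bool:
    """True if the user's *public* org memberships include 'ansible'.

    Like the workflow step, this sees only public memberships and
    only the first 100 of them.
    """
    headers = {
        "Accept": "application/vnd.github+json",
        "Authorization": f"token {token}",
    }
    response = requests.get(f"{user_url}/orgs?per_page=100", headers=headers)
    return any(org["login"] == "ansible" for org in response.json())


# The follow-up step greps this step's stdout, so the printed strings
# are load-bearing: 'community' is what triggers the labeler.
if is_ansible_org_member("https://api.github.com/users/octocat", "<token>"):
    print("User is member")
else:
    print("User is community")
```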

31  .github/workflows/label_pr.yml  (vendored)

@@ -18,3 +18,34 @@ jobs:
         with:
           repo-token: "${{ secrets.GITHUB_TOKEN }}"
           configuration-path: .github/pr_labeler.yml
+
+  community:
+    runs-on: ubuntu-latest
+    name: Label PR - Community
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
+      - name: Install python requests
+        run: pip install requests
+      - name: Check if user is a member of Ansible org
+        uses: jannekem/run-python-script-action@v1
+        id: check_user
+        with:
+          script: |
+            import requests
+            headers = {'Accept': 'application/vnd.github+json', 'Authorization': 'token ${{ secrets.GITHUB_TOKEN }}'}
+            response = requests.get('${{ fromJson(toJson(github.event.pull_request.user.url)) }}/orgs?per_page=100', headers=headers)
+            is_member = False
+            for org in response.json():
+                if org['login'] == 'ansible':
+                    is_member = True
+            if is_member:
+                print("User is member")
+            else:
+                print("User is community")
+      - name: Add community label if not a member
+        if: contains(steps.check_user.outputs.stdout, 'community')
+        uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90
+        with:
+          add-labels: "community"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}

18  .github/workflows/pr_body_check.yml  (vendored)

@@ -13,21 +13,13 @@ jobs:
       packages: write
       contents: read
     steps:
-      - name: Write PR body to a file
-        run: |
-          cat >> pr.body << __SOME_RANDOM_PR_EOF__
-          ${{ github.event.pull_request.body }}
-          __SOME_RANDOM_PR_EOF__
-
-      - name: Display the received body for troubleshooting
-        run: cat pr.body
-
-      # We want to write these out individually just incase the options were joined on a single line
       - name: Check for each of the lines
+        env:
+          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
-          grep "Bug, Docs Fix or other nominal change" pr.body > Z
-          grep "New or Enhanced Feature" pr.body > Y
-          grep "Breaking Change" pr.body > X
+          echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z
+          echo "$PR_BODY" | grep "New or Enhanced Feature" > Y
+          echo "$PR_BODY" | grep "Breaking Change" > X
           exit 0
         # We exit 0 and set the shell to prevent the returns from the greps from failing this step
         # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
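The change above stops interpolating the PR body into the script text and passes it through an environment variable instead, so a crafted PR description can no longer inject shell syntax into the `run:` block. A sketch of the same principle outside Actions — the command and variable names are illustrative, not from this repo:

```python
import os
import subprocess

untrusted = 'hello"; rm -rf /tmp/x; echo "'  # attacker-controlled text

# Unsafe: the untrusted text becomes part of the shell program itself.
# subprocess.run(f'echo "{untrusted}" | grep hello', shell=True)

# Safer: the text only ever exists as *data* in the environment; the
# shell program is a fixed string, like the fixed run: script above.
subprocess.run(
    'echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change"',
    shell=True,
    env={**os.environ, "PR_BODY": untrusted},
)
```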

19  .github/workflows/promote.yml  (vendored)

@@ -1,5 +1,9 @@
 ---
 name: Promote Release
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   release:
     types: [published]
@@ -34,9 +38,13 @@ jobs:
       - name: Build collection and publish to galaxy
         run: |
           COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
-          ansible-galaxy collection publish \
-            --token=${{ secrets.GALAXY_TOKEN }} \
-            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
+          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
+            ansible-galaxy collection publish \
+              --token=${{ secrets.GALAXY_TOKEN }} \
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+          fi
 
       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -48,6 +56,7 @@ jobs:
 
       - name: Build awxkit and upload to pypi
         run: |
+          git reset --hard
           cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
@@ -70,4 +79,6 @@ jobs:
           docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
           docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
           docker push quay.io/${{ github.repository }}:latest
+          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
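The new guard makes the Galaxy publish step idempotent: Galaxy answers the download URL of an already-published version with a 302 redirect, so a re-run of the release workflow skips the upload instead of failing. The same probe in Python — namespace and version values are illustrative:

```python
import requests


def galaxy_release_exists(namespace: str, version: str) -> bool:
    """Mirror the workflow's curl probe: a 302 on the download URL
    means this collection version was already published."""
    url = (
        "https://galaxy.ansible.com/download/"
        f"{namespace}-awx-{version}.tar.gz"
    )
    # allow_redirects=False so we observe the 302 itself, as `curl --head` does.
    return requests.head(url, allow_redirects=False).status_code == 302


if galaxy_release_exists("awx", "21.5.0"):
    print("Galaxy release already done")
else:
    print("would run: ansible-galaxy collection publish ...")
```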

19  .github/workflows/stage.yml  (vendored)

@@ -1,5 +1,9 @@
 ---
 name: Stage Release
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   workflow_dispatch:
     inputs:
@@ -80,6 +84,20 @@ jobs:
           -e push=yes \
           -e awx_official=yes
 
+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+
+      - name: tag awx-ee:latest with version input
+        run: |
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+
       - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
@@ -99,6 +117,7 @@ jobs:
         env:
           AWX_TEST_IMAGE: ${{ github.repository }}
           AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
 
       - name: Create draft release for AWX
         working-directory: awx

29  .github/workflows/update_dependabot_prs.yml  (vendored, new file)

@@ -0,0 +1,29 @@
+---
+name: Dependency Pr Update
+on:
+  pull_request:
+    types: [labeled, opened, reopened]
+
+jobs:
+  pr-check:
+    name: Update Dependabot Prs
+    if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout branch
+        uses: actions/checkout@v3
+
+      - name: Update PR Body
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+          OWNER: ${{ github.repository_owner }}
+          REPO: ${{ github.event.repository.name }}
+          PR: ${{github.event.pull_request.number}}
+          PR_BODY: ${{github.event.pull_request.body}}
+        run: |
+          gh pr checkout ${{ env.PR }}
+          echo "${{ env.PR_BODY }}" > my_pr_body.txt
+          echo "" >> my_pr_body.txt
+          echo "Bug, Docs Fix or other nominal change" >> my_pr_body.txt
+          gh pr edit ${{env.PR}} --body-file my_pr_body.txt
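This new workflow exists to satisfy pr_body_check.yml: Dependabot bodies never contain one of the required changelog lines, so the job appends the nominal-change line and pushes the body back with `gh`. A sketch of the same edit done against the REST API instead of the CLI — the PATCH pulls endpoint is the standard GitHub API; the token and PR coordinates are placeholders:

```python
import requests

token = "<GITHUB_TOKEN>"                     # placeholder
owner, repo, number = "ansible", "awx", 1234  # placeholder PR

headers = {
    "Accept": "application/vnd.github+json",
    "Authorization": f"token {token}",
}
base = f"https://api.github.com/repos/{owner}/{repo}/pulls/{number}"

# Fetch the current body, then append the line pr_body_check greps for.
body = requests.get(base, headers=headers).json().get("body") or ""
body += "\n\nBug, Docs Fix or other nominal change"

requests.patch(base, headers=headers, json={"body": body})
```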

5  .github/workflows/upload_schema.yml  (vendored)

@@ -1,10 +1,15 @@
 ---
 name: Upload API Schema
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   push:
     branches:
       - devel
       - release_**
+      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest

3  .gitignore  (vendored)

@@ -153,9 +153,6 @@ use_dev_supervisor.txt
 /sanity/
 /awx_collection_build/
 
-# Setup for metrics gathering
-tools/prometheus/prometheus.yml
-
 .idea/*
 *.unison.tmp
 *.#
@@ -8,6 +8,8 @@ ignore: |
   awx/ui/test/e2e/tests/smoke-vars.yml
   awx/ui/node_modules
   tools/docker-compose/_sources
+  # django template files
+  awx/api/templates/instance_install_bundle/**
 
 extends: default
 
@@ -3,7 +3,7 @@ recursive-include awx *.po
 recursive-include awx *.mo
 recursive-include awx/static *
 recursive-include awx/templates *.html
-recursive-include awx/api/templates *.md *.html
+recursive-include awx/api/templates *.md *.html *.yml
 recursive-include awx/ui/build *.html
 recursive-include awx/ui/build *
 recursive-include awx/playbooks *.yml
@@ -12,7 +12,7 @@ recursive-include awx/plugins *.ps1
 recursive-include requirements *.txt
 recursive-include requirements *.yml
 recursive-include config *
-recursive-include docs/licenses *
+recursive-include licenses *
 recursive-exclude awx devonly.py*
 recursive-exclude awx/api/tests *
 recursive-exclude awx/main/tests *

175  Makefile

@@ -6,7 +6,20 @@ CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
-COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+
+# ansible-test requires semver compatable version, so we allow overrides to hack it
+COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+# args for the ansible-test sanity command
+COLLECTION_SANITY_ARGS ?= --docker
+# collection unit testing directories
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+# collection integration test directories (defaults to all)
+COLLECTION_TEST_TARGET ?=
+# args for collection install
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false
+
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -34,7 +47,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 setuptools_scm[toml]==6.4.2 wheel==0.36.2
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
 
 NAME ?= awx
 
@@ -52,7 +65,7 @@ I18N_FLAG_FILE = .i18n_built
 	sdist \
 	ui-release ui-devel \
 	VERSION PYTHON_VERSION docker-compose-sources \
-	.git/hooks/pre-commit
+	.git/hooks/pre-commit github_ci_setup github_ci_runner
 
 clean-tmp:
 	rm -rf tmp/
@@ -72,7 +85,7 @@ clean-languages:
 	rm -f $(I18N_FLAG_FILE)
 	find ./awx/locale/ -type f -regex ".*\.mo$" -delete
 
-# Remove temporary build files, compiled Python files.
+## Remove temporary build files, compiled Python files.
 clean: clean-ui clean-api clean-awxkit clean-dist
 	rm -rf awx/public
 	rm -rf awx/lib/site-packages
@@ -85,6 +98,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist
 
 clean-api:
 	rm -rf build $(NAME)-$(VERSION) *.egg-info
+	rm -rf .tox
 	find . -type f -regex ".*\.py[co]$$" -delete
 	find . -type d -name "__pycache__" -delete
 	rm -f awx/awx_test.sqlite3*
@@ -94,7 +108,7 @@ clean-api:
 clean-awxkit:
 	rm -rf awxkit/*.egg-info awxkit/.tox awxkit/build/*
 
-# convenience target to assert environment variables are defined
+## convenience target to assert environment variables are defined
 guard-%:
 	@if [ "$${$*}" = "" ]; then \
 	    echo "The required environment variable '$*' is not set"; \
@@ -117,7 +131,7 @@ virtualenv_awx:
 	    fi; \
 	fi
 
-# Install third-party requirements needed for AWX's environment.
+## Install third-party requirements needed for AWX's environment.
 # this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
 	if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
@@ -136,7 +150,7 @@ requirements_dev: requirements_awx requirements_awx_dev
 
 requirements_test: requirements
 
-# "Install" awx package in development mode.
+## "Install" awx package in development mode.
 develop:
 	@if [ "$(VIRTUAL_ENV)" ]; then \
 	    pip uninstall -y awx; \
@@ -153,21 +167,21 @@ version_file:
 	fi; \
 	$(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \
 
-# Refresh development environment after pulling new code.
+## Refresh development environment after pulling new code.
 refresh: clean requirements_dev version_file develop migrate
 
-# Create Django superuser.
+## Create Django superuser.
 adduser:
 	$(MANAGEMENT_COMMAND) createsuperuser
 
-# Create database tables and apply any new migrations.
+## Create database tables and apply any new migrations.
 migrate:
 	if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
 	$(MANAGEMENT_COMMAND) migrate --noinput
 
-# Run after making changes to the models to create a new migration.
+## Run after making changes to the models to create a new migration.
 dbchange:
 	$(MANAGEMENT_COMMAND) makemigrations
 
@@ -181,7 +195,7 @@ collectstatic:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
 DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
 
@@ -218,7 +232,7 @@ wsbroadcast:
 	fi; \
 	$(PYTHON) manage.py run_wsbroadcast
 
-# Run to start the background task dispatcher for development.
+## Run to start the background task dispatcher for development.
 dispatcher:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
@@ -226,7 +240,7 @@ dispatcher:
 	$(PYTHON) manage.py run_dispatcher
 
 
-# Run to start the zeromq callback receiver
+## Run to start the zeromq callback receiver
 receiver:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
@@ -278,7 +292,7 @@ awx-link:
 
 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
 PYTEST_ARGS ?= -n auto
-# Run all API unit tests.
+## Run all API unit tests.
 test:
 	if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
@@ -287,19 +301,28 @@ test:
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
 
-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-COLLECTION_TEST_TARGET ?=
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
+## Login to Github container image registry, pull image, then build image.
+github_ci_setup:
+	# GITHUB_ACTOR is automatic github actions env var
+	# CI_GITHUB_TOKEN is defined in .github files
+	echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
+	docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
+	make docker-compose-build
+
+## Runs AWX_DOCKER_CMD inside a new docker container.
+docker-runner:
+	docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
+
+## Builds image and runs AWX_DOCKER_CMD in it, mainly for .github checks.
+github_ci_runner: github_ci_setup docker-runner
 
 test_collection:
 	rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
 	if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi && \
-	pip install ansible-core && \
+	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+	ansible --version
 	py.test $(COLLECTION_TEST_DIRS) -v
 # The python path needs to be modified so that the tests can find Ansible within the container
 # First we will use anything expility set as PYTHONPATH
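The heavy lifting in this CI consolidation is the new `docker-runner` target: it mounts the checkout into the devel image and runs whatever `AWX_DOCKER_CMD` holds. A rough Python equivalent, useful for seeing what one matrix job actually executes — the image name and default command are examples matching this Makefile's conventions, not pinned values:

```python
import os
import subprocess

# Example values; in CI these come from the workflow env and the matrix.
devel_image = "ghcr.io/ansible/awx_devel:devel"
awx_docker_cmd = os.environ.get("AWX_DOCKER_CMD", "/start_tests.sh")

subprocess.run(
    [
        "docker", "run",
        "-u", str(os.getuid()),           # $(shell id -u)
        "--rm",
        "-v", f"{os.getcwd()}:/awx_devel/:Z",
        "--workdir=/awx_devel",
        devel_image,                      # $(DEVEL_IMAGE_NAME)
        *awx_docker_cmd.split(),          # naive split of AWX_DOCKER_CMD
    ],
    check=True,
)
```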
@@ -329,8 +352,13 @@ install_collection: build_collection
 	rm -rf $(COLLECTION_INSTALL)
 	ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz
 
-test_collection_sanity: install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test sanity
+test_collection_sanity:
+	rm -rf awx_collection_build/
+	rm -rf $(COLLECTION_INSTALL)
+	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
+	ansible --version
+	COLLECTION_VERSION=1.0.0 make install_collection
+	cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
 
 test_collection_integration: install_collection
 	cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
@@ -341,23 +369,24 @@ test_unit:
 	fi; \
 	py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
 
-# Run all API unit tests with coverage enabled.
+## Run all API unit tests with coverage enabled.
 test_coverage:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
 	py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)
 
-# Output test coverage as HTML (into htmlcov directory).
+## Output test coverage as HTML (into htmlcov directory).
 coverage_html:
 	coverage html
 
-# Run API unit tests across multiple Python/Django versions with Tox.
+## Run API unit tests across multiple Python/Django versions with Tox.
 test_tox:
 	tox -v
 
-# Make fake data
 DATA_GEN_PRESET = ""
+## Make fake data
 bulk_data:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
@@ -376,28 +405,29 @@ clean-ui:
     rm -rf awx/ui/build
     rm -rf awx/ui/src/locales/_build
     rm -rf $(UI_BUILD_FLAG_FILE)
+    # the collectstatic command doesn't like it if this dir doesn't exist.
+    mkdir -p awx/ui/build/static

 awx/ui/node_modules:
-    NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn ci
+    NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci

-$(UI_BUILD_FLAG_FILE): awx/ui/node_modules
+$(UI_BUILD_FLAG_FILE):
+    $(MAKE) awx/ui/node_modules
     $(PYTHON) tools/scripts/compilemessages.py
     $(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
     $(NPM_BIN) --prefix awx/ui --loglevel warn run build
-    mkdir -p awx/public/static/css
-    mkdir -p awx/public/static/js
-    mkdir -p awx/public/static/media
-    cp -r awx/ui/build/static/css/* awx/public/static/css
-    cp -r awx/ui/build/static/js/* awx/public/static/js
-    cp -r awx/ui/build/static/media/* awx/public/static/media
     touch $@


 ui-release: $(UI_BUILD_FLAG_FILE)

 ui-devel: awx/ui/node_modules
     @$(MAKE) -B $(UI_BUILD_FLAG_FILE)
+    mkdir -p /var/lib/awx/public/static/css
+    mkdir -p /var/lib/awx/public/static/js
+    mkdir -p /var/lib/awx/public/static/media
+    cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
+    cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
+    cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media

 ui-devel-instrumented: awx/ui/node_modules
     $(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -449,12 +479,18 @@ awx/projects:
 COMPOSE_UP_OPTS ?=
 COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
-EXECUTION_NODE_COUNT ?= 2
+EXECUTION_NODE_COUNT ?= 0
 MINIKUBE_CONTAINER_GROUP ?= false
+MINIKUBE_SETUP ?= false # if false, run minikube separately
+EXTRA_SOURCES_ANSIBLE_OPTS ?=

+ifneq ($(ADMIN_PASSWORD),)
+EXTRA_SOURCES_ANSIBLE_OPTS := -e admin_password=$(ADMIN_PASSWORD) $(EXTRA_SOURCES_ANSIBLE_OPTS)
+endif

 docker-compose-sources: .git/hooks/pre-commit
     @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-        ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+        ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
     fi;

     ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
@@ -468,7 +504,8 @@ docker-compose-sources: .git/hooks/pre-commit
         -e enable_ldap=$(LDAP) \
         -e enable_splunk=$(SPLUNK) \
         -e enable_prometheus=$(PROMETHEUS) \
-        -e enable_grafana=$(GRAFANA)
+        -e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)


 docker-compose: awx/projects docker-compose-sources
@@ -502,7 +539,7 @@ docker-compose-container-group-clean:
     fi
     rm -rf tools/docker-compose-minikube/_sources/

-# Base development image build
+## Base development image build
 docker-compose-build:
     ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
     DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
@@ -520,7 +557,7 @@ docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean

 docker-refresh: docker-clean docker-compose

-# Docker Development Environment with Elastic Stack Connected
+## Docker Development Environment with Elastic Stack Connected
 docker-compose-elk: awx/projects docker-compose-sources
     docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

@@ -557,31 +594,73 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
         -e template_dest=_build_kube_dev \
         -e receptor_image=$(RECEPTOR_IMAGE)

+## Build awx_kube_devel image for development on local Kubernetes environment.
 awx-kube-dev-build: Dockerfile.kube-dev
     DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
         --build-arg BUILDKIT_INLINE_CACHE=1 \
         --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
         -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .

+## Build awx image for deployment on Kubernetes environment.
+awx-kube-build: Dockerfile
+    DOCKER_BUILDKIT=1 docker build -f Dockerfile \
+        --build-arg VERSION=$(VERSION) \
+        --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+        --build-arg HEADLESS=$(HEADLESS) \
+        -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .

 # Translation TASKS
 # --------------------------------------

-# generate UI .pot file, an empty template of strings yet to be translated
+## generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
     $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean

-# generate UI .po files for each locale (will update translated strings for `en`)
+## generate UI .po files for each locale (will update translated strings for `en`)
 po: $(UI_BUILD_FLAG_FILE)
     $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean

-# generate API django .pot .po
-LANG = "en-us"
+## generate API django .pot .po
 messages:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
+    $(PYTHON) manage.py makemessages -l en_us --keep-pot

 print-%:
     @echo $($*)

+# HELP related targets
+# --------------------------------------

+HELP_FILTER=.PHONY

+## Display help targets
+help:
+    @printf "Available targets:\n"
+    @make -s help/generate | grep -vE "\w($(HELP_FILTER))"

+## Display help for all targets
+help/all:
+    @printf "Available targets:\n"
+    @make -s help/generate

+## Generate help output from MAKEFILE_LIST
+help/generate:
+    @awk '/^[-a-zA-Z_0-9%:\\\.\/]+:/ { \
+        helpMessage = match(lastLine, /^## (.*)/); \
+        if (helpMessage) { \
+            helpCommand = $$1; \
+            helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
+            gsub("\\\\", "", helpCommand); \
+            gsub(":+$$", "", helpCommand); \
+            printf " \x1b[32;01m%-35s\x1b[0m %s\n", helpCommand, helpMessage; \
+        } else { \
+            helpCommand = $$1; \
+            gsub("\\\\", "", helpCommand); \
+            gsub(":+$$", "", helpCommand); \
+            printf " \x1b[32;01m%-35s\x1b[0m %s\n", helpCommand, "No help available"; \
+        } \
+    } \
+    { lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
+    @printf "\n"

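To make the awk one-liner in help/generate easier to follow, here is a rough Python sketch of the same extraction rule (an illustration under stated assumptions, not part of the patch): a target line prints together with the `## ` doc comment on the line immediately above it, or "No help available" otherwise.

import re

def generate_help(makefile_text):
    # Mirrors the awk logic: remember the previous line; when a target
    # line appears, use a preceding "## " comment as its help text.
    last_line = ''
    for line in makefile_text.splitlines():
        if re.match(r'^[-a-zA-Z_0-9%:\\./]+:', line):
            target = line.split(':')[0]
            doc = last_line[3:] if last_line.startswith('## ') else 'No help available'
            print(f'  {target:<35} {doc}')
        last_line = line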
@@ -67,7 +67,6 @@ else:
     from django.db import connection

 if HAS_DJANGO is True:
-
     # See upgrade blocker note in requirements/README.md
     try:
         names_digest('foo', 'bar', 'baz', length=8)
@@ -190,7 +189,7 @@ def manage():
         sys.stdout.write('%s\n' % __version__)
     # If running as a user without permission to read settings, display an
     # error message. Allow --help to still work.
-    elif settings.SECRET_KEY == 'permission-denied':
+    elif not os.getenv('SKIP_SECRET_KEY_CHECK', False) and settings.SECRET_KEY == 'permission-denied':
         if len(sys.argv) == 1 or len(sys.argv) >= 2 and sys.argv[1] in ('-h', '--help', 'help'):
             execute_from_command_line(sys.argv)
             sys.stdout.write('\n')
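One behavioral note on the new guard, shown as a minimal sketch (not part of the patch): os.getenv returns the raw string, so any non-empty value of SKIP_SECRET_KEY_CHECK, including "0" or "false", is truthy and bypasses the SECRET_KEY check.

import os

def secret_key_check_enabled():
    # Mirrors the condition above: the check runs only when the
    # environment variable is unset or empty.
    return not os.getenv('SKIP_SECRET_KEY_CHECK', False)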
@@ -96,6 +96,15 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
+register(
+    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
+    field_class=fields.BooleanField,
+    default=False,
+    label=_('Allow anonymous users to poll metrics'),
+    help_text=_('If true, anonymous users are allowed to poll metrics.'),
+    category=_('Authentication'),
+    category_slug='authentication',
+)


 def authentication_validate(serializer, attrs):
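A quick way to exercise the new setting (illustrative only; the host URL is an assumption): with ALLOW_METRICS_FOR_ANONYMOUS_USERS enabled, an unauthenticated poll of the metrics endpoint should succeed instead of being rejected.

import requests

# Hypothetical AWX host; no Authorization header on purpose.
resp = requests.get('https://awx.example.org/api/v2/metrics/')
print(resp.status_code)  # 200 when anonymous polling is allowed, 401 otherwise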
@@ -80,7 +80,6 @@ class VerbatimField(serializers.Field):


 class OAuth2ProviderField(fields.DictField):
-
     default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
     valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
     child = fields.IntegerField(min_value=1)
@@ -157,10 +157,9 @@ class FieldLookupBackend(BaseFilterBackend):

     # A list of fields that we know can be filtered on without the possibility
     # of introducing duplicates
-    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField)
+    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

     def get_fields_from_lookup(self, model, lookup):
-
         if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
             path, suffix = lookup.rsplit('__', 1)
         else:
@@ -6,7 +6,6 @@ import inspect
 import logging
 import time
 import uuid
-import urllib.parse

 # Django
 from django.conf import settings
@@ -14,7 +13,7 @@ from django.contrib.auth import views as auth_views
 from django.contrib.contenttypes.models import ContentType
 from django.core.cache import cache
 from django.core.exceptions import FieldDoesNotExist
-from django.db import connection
+from django.db import connection, transaction
 from django.db.models.fields.related import OneToOneRel
 from django.http import QueryDict
 from django.shortcuts import get_object_or_404
@@ -30,7 +29,7 @@ from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
 from rest_framework.permissions import AllowAny
-from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
+from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

 # AWX
@@ -41,7 +40,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
-from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 from awx.conf import settings_registry
@@ -63,9 +62,9 @@ __all__ = [
     'SubDetailAPIView',
     'ResourceAccessList',
     'ParentMixin',
-    'DeleteLastUnattachLabelMixin',
     'SubListAttachDetachAPIView',
     'CopyAPIView',
+    'GenericCancelView',
     'BaseUsersList',
 ]

@@ -91,14 +90,9 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
-        current_user = getattr(request, 'user', None)
         if request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
             ret.set_cookie('userLoggedIn', 'true')
-            current_user = UserSerializer(self.request.user)
-            current_user = smart_str(JSONRenderer().render(current_user.data))
-            current_user = urllib.parse.quote('%s' % current_user, '')
-            ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

         return ret
@@ -141,7 +135,6 @@ def get_default_schema():


 class APIView(views.APIView):
-
     schema = get_default_schema()
     versioning_class = URLPathVersioning

@@ -255,7 +248,7 @@ class APIView(views.APIView):
         response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

         if getattr(self, 'deprecated', False):
-            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
+            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'

         return response

@@ -775,28 +768,6 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
         return {'id': None}


-class DeleteLastUnattachLabelMixin(object):
-    """
-    Models for which you want the last instance to be deleted from the database
-    when the last disassociate is called should inherit from this class. Further,
-    the model should implement is_detached()
-    """
-
-    def unattach(self, request, *args, **kwargs):
-        (sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
-        if res:
-            return res
-
-        res = super(DeleteLastUnattachLabelMixin, self).unattach_by_id(request, sub_id)
-
-        obj = self.model.objects.get(id=sub_id)
-
-        if obj.is_detached():
-            obj.delete()
-
-        return res
-
-
 class SubDetailAPIView(ParentMixin, generics.RetrieveAPIView, GenericAPIView):
     pass

@@ -828,7 +799,6 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):


 class ResourceAccessList(ParentMixin, ListAPIView):
-
     serializer_class = ResourceAccessListElementSerializer
     ordering = ('username',)

@@ -851,7 +821,6 @@ def trigger_delayed_deep_copy(*args, **kwargs):


 class CopyAPIView(GenericAPIView):
-
     serializer_class = CopySerializer
     permission_classes = (AllowAny,)
     copy_return_serializer_class = None
@@ -1014,6 +983,23 @@ class CopyAPIView(GenericAPIView):
         return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)


+class GenericCancelView(RetrieveAPIView):
+    # In subclass set model, serializer_class
+    obj_permission_type = 'cancel'
+
+    @transaction.non_atomic_requests
+    def dispatch(self, *args, **kwargs):
+        return super(GenericCancelView, self).dispatch(*args, **kwargs)
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        if obj.can_cancel:
+            obj.cancel()
+            return Response(status=status.HTTP_202_ACCEPTED)
+        else:
+            return self.http_method_not_allowed(request, *args, **kwargs)
+
+
 class BaseUsersList(SubListCreateAttachDetachAPIView):
     def post(self, request, *args, **kwargs):
         ret = super(BaseUsersList, self).post(request, *args, **kwargs)
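For context, a minimal sketch of how a view would adopt the new GenericCancelView (the serializer import is an assumption; the names follow the pattern of existing AWX cancel views): set model and serializer_class, and the inherited POST handler cancels the object when obj.can_cancel allows it, otherwise answers 405.

from awx.api.generics import GenericCancelView
from awx.api.serializers import JobCancelSerializer  # assumed serializer
from awx.main.models import Job


class JobCancel(GenericCancelView):
    # GET returns the serialized job; POST cancels it (202 Accepted).
    model = Job
    serializer_class = JobCancelSerializer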
@@ -128,7 +128,7 @@ class Metadata(metadata.SimpleMetadata):
         # Special handling of notification configuration where the required properties
         # are conditional on the type selected.
         if field.field_name == 'notification_configuration':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.init_parameters

         # Special handling of notification messages where the required properties
@@ -138,7 +138,7 @@ class Metadata(metadata.SimpleMetadata):
         except (AttributeError, KeyError):
             view_model = None
         if view_model == NotificationTemplate and field.field_name == 'messages':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.default_messages

         # Update type of fields returned...
@@ -24,7 +24,6 @@ class DisabledPaginator(DjangoPaginator):


 class Pagination(pagination.PageNumberPagination):
-
     page_size_query_param = 'page_size'
     max_page_size = settings.MAX_PAGE_SIZE
     count_disabled = False
@@ -24,7 +24,6 @@ __all__ = [
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
     'IsSystemAdminOrAuditor',
-    'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]

@@ -22,7 +22,6 @@ class SurrogateEncoder(encoders.JSONEncoder):


 class DefaultJSONRenderer(renderers.JSONRenderer):
-
     encoder_class = SurrogateEncoder


@@ -95,7 +94,6 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):


 class PlainTextRenderer(renderers.BaseRenderer):
-
     media_type = 'text/plain'
     format = 'txt'

@@ -106,18 +104,15 @@ class PlainTextRenderer(renderers.BaseRenderer):


 class DownloadTextRenderer(PlainTextRenderer):
-
     format = "txt_download"


 class AnsiTextRenderer(PlainTextRenderer):
-
     media_type = 'text/plain'
     format = 'ansi'


 class AnsiDownloadRenderer(PlainTextRenderer):
-
     format = "ansi_download"

[File diff suppressed because it is too large]
@@ -1,5 +1,5 @@
 Launch a Job Template:
+{% ifmeth GET %}
 Make a GET request to this resource to determine if the job_template can be
 launched and whether any passwords are required to launch the job_template.
 The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
 * `inventory_needed_to_start`: Flag indicating the presence of an inventory
   associated with the job template. If not then one should be supplied when
   launching the job (boolean, read-only)
+{% endifmeth %}
-Make a POST request to this resource to launch the job_template. If any
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
 passwords, inventory, or extra variables (extra_vars) are required, they must
 be passed via POST data, with extra_vars given as a YAML or JSON string and
 escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
 launched, a 405 status code will be returned. If the provided credential or
 inventory are not allowed to be used by the user, then a 403 status code will
 be returned.
+{% endifmeth %}
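The documented flow maps onto two requests; a minimal client sketch follows (host, template id, and token are placeholders, and the inspected fields come from this resource's GET response):

import requests

base = 'https://awx.example.org/api/v2/job_templates/42/launch/'
headers = {'Authorization': 'Bearer <token>'}

# GET: check whether the template can launch and what must be supplied.
info = requests.get(base, headers=headers).json()
print(info.get('can_start_without_user_input'), info.get('inventory_needed_to_start'))

# POST: launch, passing extra_vars (and an inventory id if one is needed).
resp = requests.post(base, headers=headers, json={'extra_vars': {'foo': 'bar'}})
print(resp.status_code)  # 400/403/405 correspond to the error cases described above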
awx/api/templates/instance_install_bundle/group_vars/all.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
+receptor_user: awx
+receptor_group: awx
+receptor_verify: true
+receptor_tls: true
+receptor_work_commands:
+  ansible-runner:
+    command: ansible-runner
+    params: worker
+    allowruntimeparams: true
+    verifysignature: true
+custom_worksign_public_keyfile: receptor/work-public-key.pem
+custom_tls_certfile: receptor/tls/receptor.crt
+custom_tls_keyfile: receptor/tls/receptor.key
+custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
+receptor_protocol: 'tcp'
+receptor_listener: true
+receptor_port: {{ instance.listener_port }}
+receptor_dependencies:
+  - python39-pip
+{% verbatim %}
+podman_user: "{{ receptor_user }}"
+podman_group: "{{ receptor_group }}"
+{% endverbatim %}
@@ -0,0 +1,20 @@
+{% verbatim %}
+---
+- hosts: all
+  become: yes
+  tasks:
+    - name: Create the receptor user
+      user:
+        name: "{{ receptor_user }}"
+        shell: /bin/bash
+    - name: Enable Copr repo for Receptor
+      command: dnf copr enable ansible-awx/receptor -y
+    - import_role:
+        name: ansible.receptor.podman
+    - import_role:
+        name: ansible.receptor.setup
+    - name: Install ansible-runner
+      pip:
+        name: ansible-runner
+        executable: pip3.9
+{% endverbatim %}
awx/api/templates/instance_install_bundle/inventory.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
+---
+all:
+  hosts:
+    remote-execution:
+      ansible_host: {{ instance.hostname }}
+      ansible_user: <username> # user provided
+      ansible_ssh_private_key_file: ~/.ssh/id_rsa
@@ -0,0 +1,4 @@
+---
+collections:
+  - name: ansible.receptor
+    version: 1.1.0
awx/api/urls/debug.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+from django.urls import re_path
+
+from awx.api.views.debug import (
+    DebugRootView,
+    TaskManagerDebugView,
+    DependencyManagerDebugView,
+    WorkflowManagerDebugView,
+)
+
+urls = [
+    re_path(r'^$', DebugRootView.as_view(), name='debug'),
+    re_path(r'^task_manager/$', TaskManagerDebugView.as_view(), name='task_manager'),
+    re_path(r'^dependency_manager/$', DependencyManagerDebugView.as_view(), name='dependency_manager'),
+    re_path(r'^workflow_manager/$', WorkflowManagerDebugView.as_view(), name='workflow_manager'),
+]
+
+__all__ = ['urls']
@@ -3,7 +3,15 @@

 from django.urls import re_path

-from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck
+from awx.api.views import (
+    InstanceList,
+    InstanceDetail,
+    InstanceUnifiedJobsList,
+    InstanceInstanceGroupsList,
+    InstanceHealthCheck,
+    InstancePeersList,
+)
+from awx.api.views.instance_install_bundle import InstanceInstallBundle


 urls = [
@@ -12,6 +20,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
+    re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'),
+    re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'),
 ]

 __all__ = ['urls']
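The new install_bundle route pairs with the InstanceInstallBundle view added later in this diff; a client-side sketch of fetching the bundle (host, instance id, and token are placeholders):

import requests

url = 'https://awx.example.org/api/v2/instances/3/install_bundle/'
resp = requests.get(url, headers={'Authorization': 'Bearer <token>'})
if resp.status_code == 200:
    # The response body is a tar.gz; non-execution nodes return 400.
    with open('install_bundle.tar.gz', 'wb') as f:
        f.write(resp.content)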
@@ -3,26 +3,31 @@

 from django.urls import re_path

-from awx.api.views import (
+from awx.api.views.inventory import (
     InventoryList,
     InventoryDetail,
-    InventoryHostsList,
-    InventoryGroupsList,
-    InventoryRootGroupsList,
-    InventoryVariableData,
-    InventoryScriptView,
-    InventoryTreeView,
-    InventoryInventorySourcesList,
-    InventoryInventorySourcesUpdate,
+    ConstructedInventoryDetail,
+    ConstructedInventoryList,
     InventoryActivityStreamList,
+    InventoryInputInventoriesList,
     InventoryJobTemplateList,
-    InventoryAdHocCommandsList,
     InventoryAccessList,
     InventoryObjectRolesList,
     InventoryInstanceGroupsList,
     InventoryLabelList,
     InventoryCopy,
 )
+from awx.api.views import (
+    InventoryHostsList,
+    InventoryGroupsList,
+    InventoryInventorySourcesList,
+    InventoryInventorySourcesUpdate,
+    InventoryAdHocCommandsList,
+    InventoryRootGroupsList,
+    InventoryScriptView,
+    InventoryTreeView,
+    InventoryVariableData,
+)


 urls = [
@@ -35,6 +40,7 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
     re_path(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
     re_path(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
+    re_path(r'^(?P<pk>[0-9]+)/input_inventories/$', InventoryInputInventoriesList.as_view(), name='inventory_input_inventories'),
     re_path(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
     re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
     re_path(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
@@ -46,4 +52,10 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
 ]

-__all__ = ['urls']
+# Constructed inventory special views
+constructed_inventory_urls = [
+    re_path(r'^$', ConstructedInventoryList.as_view(), name='constructed_inventory_list'),
+    re_path(r'^(?P<pk>[0-9]+)/$', ConstructedInventoryDetail.as_view(), name='constructed_inventory_detail'),
+]
+
+__all__ = ['urls', 'constructed_inventory_urls']
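Once constructed_inventory_urls is mounted under /api/v2/constructed_inventories/ (see the urls.py hunk later in this diff), the new list and detail routes behave like any other resource; a sketch with placeholder host and token:

import requests

headers = {'Authorization': 'Bearer <token>'}
base = 'https://awx.example.org/api/v2/constructed_inventories/'

# List constructed inventories, then fetch each detail view.
inventories = requests.get(base, headers=headers).json()
for inv in inventories.get('results', []):
    detail = requests.get(f"{base}{inv['id']}/", headers=headers).json()
    print(detail['id'], detail['name'])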
@@ -3,6 +3,9 @@

 from django.urls import re_path

+from awx.api.views.inventory import (
+    InventoryUpdateEventsList,
+)
 from awx.api.views import (
     InventoryUpdateList,
     InventoryUpdateDetail,
@@ -10,7 +13,6 @@ from awx.api.views import (
     InventoryUpdateStdout,
     InventoryUpdateNotificationsList,
     InventoryUpdateCredentialsList,
-    InventoryUpdateEventsList,
 )

@@ -3,7 +3,7 @@

 from django.urls import re_path

-from awx.api.views import LabelList, LabelDetail
+from awx.api.views.labels import LabelList, LabelDetail


 urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
@@ -10,7 +10,7 @@ from oauthlib import oauth2
 from oauth2_provider import views

 from awx.main.models import RefreshToken
-from awx.api.views import ApiOAuthAuthorizationRootView
+from awx.api.views.root import ApiOAuthAuthorizationRootView


 class TokenView(views.TokenView):
@@ -3,7 +3,7 @@

 from django.urls import re_path

-from awx.api.views import (
+from awx.api.views.organization import (
     OrganizationList,
     OrganizationDetail,
     OrganizationUsersList,
@@ -14,7 +14,6 @@ from awx.api.views import (
     OrganizationJobTemplatesList,
     OrganizationWorkflowJobTemplatesList,
     OrganizationTeamsList,
-    OrganizationCredentialList,
     OrganizationActivityStreamList,
     OrganizationNotificationTemplatesList,
     OrganizationNotificationTemplatesErrorList,
@@ -25,8 +24,8 @@ from awx.api.views import (
     OrganizationGalaxyCredentialsList,
     OrganizationObjectRolesList,
     OrganizationAccessList,
-    OrganizationApplicationList,
 )
+from awx.api.views import OrganizationCredentialList, OrganizationApplicationList


 urls = [
@@ -3,7 +3,7 @@

 from django.urls import re_path

-from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList
+from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList


 urls = [
@@ -11,6 +11,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', ScheduleLabelsList.as_view(), name='schedule_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', ScheduleInstanceGroupList.as_view(), name='schedule_instance_groups_list'),
 ]

 __all__ = ['urls']
@@ -2,17 +2,19 @@
 # All Rights Reserved.

 from __future__ import absolute_import, unicode_literals
-from django.conf import settings
 from django.urls import include, re_path

+from awx import MODE
 from awx.api.generics import LoggedLoginView, LoggedLogoutView
-from awx.api.views import (
+from awx.api.views.root import (
     ApiRootView,
     ApiV2RootView,
     ApiV2PingView,
     ApiV2ConfigView,
     ApiV2SubscriptionView,
     ApiV2AttachView,
+)
+from awx.api.views import (
     AuthView,
     UserMeList,
     DashboardView,
@@ -28,8 +30,8 @@ from awx.api.views import (
     OAuth2TokenList,
     ApplicationOAuth2TokenList,
     OAuth2ApplicationDetail,
-    MeshVisualizer,
 )
+from awx.api.views.mesh_visualizer import MeshVisualizer

 from awx.api.views.metrics import MetricsView

@@ -37,7 +39,7 @@ from .organization import urls as organization_urls
 from .user import urls as user_urls
 from .project import urls as project_urls
 from .project_update import urls as project_update_urls
-from .inventory import urls as inventory_urls
+from .inventory import urls as inventory_urls, constructed_inventory_urls
 from .execution_environments import urls as execution_environment_urls
 from .team import urls as team_urls
 from .host import urls as host_urls
@@ -108,6 +110,7 @@ v2_urls = [
     re_path(r'^project_updates/', include(project_update_urls)),
     re_path(r'^teams/', include(team_urls)),
     re_path(r'^inventories/', include(inventory_urls)),
+    re_path(r'^constructed_inventories/', include(constructed_inventory_urls)),
     re_path(r'^hosts/', include(host_urls)),
     re_path(r'^groups/', include(group_urls)),
     re_path(r'^inventory_sources/', include(inventory_source_urls)),
@@ -145,7 +148,12 @@ urlpatterns = [
     re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
     re_path(r'^o/', include(oauth2_root_urls)),
 ]
-if settings.SETTINGS_MODULE == 'awx.settings.development':
+if MODE == 'development':
+    # Only include these if we are in the development environment
     from awx.api.swagger import SwaggerSchemaView

     urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]

+    from awx.api.urls.debug import urls as debug_urls
+
+    urlpatterns += [re_path(r'^debug/', include(debug_urls))]
@@ -1,6 +1,6 @@
 from django.urls import re_path

-from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


 urlpatterns = [
@@ -10,6 +10,8 @@ from awx.api.views import (
     WorkflowJobNodeFailureNodesList,
     WorkflowJobNodeAlwaysNodesList,
     WorkflowJobNodeCredentialsList,
+    WorkflowJobNodeLabelsList,
+    WorkflowJobNodeInstanceGroupsList,
 )


@@ -20,6 +22,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobNodeLabelsList.as_view(), name='workflow_job_node_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobNodeInstanceGroupsList.as_view(), name='workflow_job_node_instance_groups_list'),
 ]

 __all__ = ['urls']
@@ -11,6 +11,8 @@ from awx.api.views import (
     WorkflowJobTemplateNodeAlwaysNodesList,
     WorkflowJobTemplateNodeCredentialsList,
     WorkflowJobTemplateNodeCreateApproval,
+    WorkflowJobTemplateNodeLabelsList,
+    WorkflowJobTemplateNodeInstanceGroupsList,
 )


@@ -21,6 +23,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateNodeLabelsList.as_view(), name='workflow_job_template_node_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobTemplateNodeInstanceGroupsList.as_view(), name='workflow_job_template_node_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'),
 ]

awx/api/validators.py (new file, 54 lines)
@@ -0,0 +1,54 @@
+import re
+
+from django.core.validators import RegexValidator, validate_ipv46_address
+from django.core.exceptions import ValidationError
+
+
+class HostnameRegexValidator(RegexValidator):
+    """
+    Fully validates a domain name that is compliant with norms in Linux/RHEL
+    - Cannot start with a hyphen
+    - Cannot begin with, or end with a "."
+    - Cannot contain any whitespaces
+    - Entire hostname is max 255 chars (including dots)
+    - Each domain/label is between 1 and 63 characters, except top level domain, which must be at least 2 characters
+    - Supports ipv4, ipv6, simple hostnames and FQDNs
+    - Follows RFC 9210 (modern RFC 1123, 1178) requirements
+
+    Accepts an IP Address or Hostname as the argument
+    """
+
+    regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
+    flags = re.IGNORECASE
+
+    def __call__(self, value):
+        regex_matches, err = self.__validate(value)
+        invalid_input = regex_matches if self.inverse_match else not regex_matches
+        if invalid_input:
+            if err is None:
+                err = ValidationError(self.message, code=self.code, params={"value": value})
+            raise err
+
+    def __str__(self):
+        return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"
+
+    def __validate(self, value):
+        if ' ' in value:
+            return False, ValidationError("whitespaces in hostnames are illegal")
+
+        """
+        If we have an IP address, try and validate it.
+        """
+        try:
+            validate_ipv46_address(value)
+            return True, None
+        except ValidationError:
+            pass
+
+        """
+        By this point in the code, we probably have a simple hostname, FQDN or a strange hostname like "192.localhost.domain.101"
+        """
+        if not self.regex.match(value):
+            return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")
+
+        return True, None
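A short usage sketch for the new validator (illustrative; it must run inside a Django-configured environment since it raises django.core.exceptions.ValidationError):

from django.core.exceptions import ValidationError

from awx.api.validators import HostnameRegexValidator

# Accepts simple hostnames, FQDNs, and IPv4/IPv6 literals; rejects the rest.
validate_hostname = HostnameRegexValidator()
for candidate in ('node01', 'exec-node.example.org', '2001:db8::1', 'bad host'):
    try:
        validate_hostname(candidate)
        print(candidate, 'ok')
    except ValidationError as e:
        print(candidate, 'rejected:', e)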
[File diff suppressed because it is too large]

awx/api/views/debug.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+from collections import OrderedDict
+
+from django.conf import settings
+
+from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
+from awx.api.generics import APIView
+
+from awx.main.scheduler import TaskManager, DependencyManager, WorkflowManager
+
+
+class TaskManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Task'
+
+    def get(self, request):
+        TaskManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class DependencyManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Dependency'
+
+    def get(self, request):
+        DependencyManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class WorkflowManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Workflow'
+
+    def get(self, request):
+        WorkflowManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class DebugRootView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+
+    def get(self, request, format=None):
+        '''List of available debug urls'''
+        data = OrderedDict()
+        data['task_manager'] = '/api/debug/task_manager/'
+        data['dependency_manager'] = '/api/debug/dependency_manager/'
+        data['workflow_manager'] = '/api/debug/workflow_manager/'
+        return Response(data)
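In a development deployment these views give a manual trigger for each manager, pairing with the AWX_DISABLE_TASK_MANAGERS setting mentioned in the responses; a sketch of poking them over HTTP (the dev-server URL is an assumption):

import requests

for name in ('task_manager', 'dependency_manager', 'workflow_manager'):
    r = requests.get(f'http://localhost:8013/api/debug/{name}/')
    print(name, r.status_code, r.text)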
awx/api/views/instance_install_bundle.py (new file, 199 lines)
@@ -0,0 +1,199 @@
# Copyright (c) 2018 Red Hat, Inc.
# All Rights Reserved.

import datetime
import io
import ipaddress
import os
import tarfile

import asn1

from awx.api import serializers
from awx.api.generics import GenericAPIView, Response
from awx.api.permissions import IsSystemAdminOrAuditor
from awx.main import models

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509 import DNSName, IPAddress, ObjectIdentifier, OtherName
from cryptography.x509.oid import NameOID

from django.http import HttpResponse
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _

from rest_framework import status


# Red Hat has an OID namespace (RHANANA). Receptor has its own designation under that.
RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"


# generate install bundle for the instance
# install bundle directory structure
# ├── install_receptor.yml (playbook)
# ├── inventory.yml
# ├── group_vars
# │   └── all.yml
# ├── receptor
# │   ├── tls
# │   │   ├── ca
# │   │   │   └── receptor-ca.crt
# │   │   ├── receptor.crt
# │   │   └── receptor.key
# │   └── work-public-key.pem
# └── requirements.yml
class InstanceInstallBundle(GenericAPIView):
    name = _('Install Bundle')
    model = models.Instance
    serializer_class = serializers.InstanceSerializer
    permission_classes = (IsSystemAdminOrAuditor,)

    def get(self, request, *args, **kwargs):
        instance_obj = self.get_object()

        if instance_obj.node_type not in ('execution',):
            return Response(
                data=dict(msg=_('Install bundle can only be generated for execution nodes.')),
                status=status.HTTP_400_BAD_REQUEST,
            )

        with io.BytesIO() as f:
            with tarfile.open(fileobj=f, mode='w:gz') as tar:
                # copy /etc/receptor/tls/ca/receptor-ca.crt to receptor/tls/ca in the tar file
                tar.add(
                    os.path.realpath('/etc/receptor/tls/ca/receptor-ca.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/receptor-ca.crt"
                )

                # copy /etc/receptor/signing/work-public-key.pem to receptor/work-public-key.pem
                tar.add('/etc/receptor/signing/work-public-key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work-public-key.pem")

                # generate and write the receptor key and cert to receptor/tls/ in the tar file
                key, cert = generate_receptor_tls(instance_obj)

                key_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.key")
                key_tarinfo.size = len(key)
                tar.addfile(key_tarinfo, io.BytesIO(key))

                cert_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.crt")
                cert_tarinfo.size = len(cert)
                tar.addfile(cert_tarinfo, io.BytesIO(cert))

                # generate and write install_receptor.yml to the tar file
                playbook = generate_playbook().encode('utf-8')
                playbook_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/install_receptor.yml")
                playbook_tarinfo.size = len(playbook)
                tar.addfile(playbook_tarinfo, io.BytesIO(playbook))

                # generate and write inventory.yml to the tar file
                inventory_yml = generate_inventory_yml(instance_obj).encode('utf-8')
                inventory_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/inventory.yml")
                inventory_yml_tarinfo.size = len(inventory_yml)
                tar.addfile(inventory_yml_tarinfo, io.BytesIO(inventory_yml))

                # generate and write group_vars/all.yml to the tar file
                group_vars = generate_group_vars_all_yml(instance_obj).encode('utf-8')
                group_vars_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/group_vars/all.yml")
                group_vars_tarinfo.size = len(group_vars)
                tar.addfile(group_vars_tarinfo, io.BytesIO(group_vars))

                # generate and write requirements.yml to the tar file
                requirements_yml = generate_requirements_yml().encode('utf-8')
                requirements_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/requirements.yml")
                requirements_yml_tarinfo.size = len(requirements_yml)
                tar.addfile(requirements_yml_tarinfo, io.BytesIO(requirements_yml))

            # respond with the tarfile
            f.seek(0)
            response = HttpResponse(f.read(), status=status.HTTP_200_OK)
            response['Content-Disposition'] = f"attachment; filename={instance_obj.hostname}_install_bundle.tar.gz"
            return response


def generate_playbook():
    return render_to_string("instance_install_bundle/install_receptor.yml")


def generate_requirements_yml():
    return render_to_string("instance_install_bundle/requirements.yml")


def generate_inventory_yml(instance_obj):
    return render_to_string("instance_install_bundle/inventory.yml", context=dict(instance=instance_obj))


def generate_group_vars_all_yml(instance_obj):
    return render_to_string("instance_install_bundle/group_vars/all.yml", context=dict(instance=instance_obj))


def generate_receptor_tls(instance_obj):
    # generate private key for the receptor
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

    # encode receptor hostname to asn1
    hostname = instance_obj.hostname
    encoder = asn1.Encoder()
    encoder.start()
    encoder.write(hostname.encode(), nr=asn1.Numbers.UTF8String)
    hostname_asn1 = encoder.output()

    san_params = [
        DNSName(hostname),
        OtherName(ObjectIdentifier(RECEPTOR_OID), hostname_asn1),
    ]

    try:
        san_params.append(IPAddress(ipaddress.IPv4Address(hostname)))
    except ipaddress.AddressValueError:
        pass

    # generate a certificate signing request for the receptor
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(
            x509.Name(
                [
                    x509.NameAttribute(NameOID.COMMON_NAME, hostname),
                ]
            )
        )
        .add_extension(
            x509.SubjectAlternativeName(san_params),
            critical=False,
        )
        .sign(key, hashes.SHA256())
    )

    # sign csr with the receptor ca key from /etc/receptor/tls/ca/receptor-ca.key
    with open('/etc/receptor/tls/ca/receptor-ca.key', 'rb') as f:
        ca_key = serialization.load_pem_private_key(
            f.read(),
            password=None,
        )

    with open('/etc/receptor/tls/ca/receptor-ca.crt', 'rb') as f:
        ca_cert = x509.load_pem_x509_certificate(f.read())

    cert = (
        x509.CertificateBuilder()
        .subject_name(csr.subject)
        .issuer_name(ca_cert.issuer)
        .public_key(csr.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
        .add_extension(
            csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
            critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
        )
        .sign(ca_key, hashes.SHA256())
    )

    key = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )

    cert = cert.public_bytes(
        encoding=serialization.Encoding.PEM,
    )

    return key, cert
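For reference, a sketch of fetching the bundle from a client, assuming the view is routed under the instance detail at /api/v2/instances/<pk>/install_bundle/ (the path, host, pk, and token below are assumptions, not shown in this diff):

import io
import tarfile

import requests

resp = requests.get(
    'https://awx.example.org/api/v2/instances/3/install_bundle/',  # hypothetical host and pk
    headers={'Authorization': 'Bearer <token>'},  # any system admin or auditor credential
)
resp.raise_for_status()

# unpack <hostname>_install_bundle/ with the layout diagrammed above
with tarfile.open(fileobj=io.BytesIO(resp.content), mode='r:gz') as tar:
    tar.extractall('.')

The extracted install_receptor.yml can then be run against the bundled inventory.yml to finish enrolling the execution node.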
@@ -14,12 +14,11 @@ from django.utils.translation import gettext_lazy as _
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.response import Response
 from rest_framework import status
+from rest_framework import serializers

 # AWX
 from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
-from awx.main.models.label import Label
 from awx.api.generics import (
     ListCreateAPIView,
     RetrieveUpdateDestroyAPIView,
@@ -27,19 +26,18 @@ from awx.api.generics import (
     SubListAttachDetachAPIView,
     ResourceAccessList,
     CopyAPIView,
-    DeleteLastUnattachLabelMixin,
-    SubListCreateAttachDetachAPIView,
 )
+from awx.api.views.labels import LabelSubListCreateAttachDetachView
 from awx.api.serializers import (
     InventorySerializer,
+    ConstructedInventorySerializer,
     ActivityStreamSerializer,
     RoleSerializer,
     InstanceGroupSerializer,
     InventoryUpdateEventSerializer,
     JobTemplateSerializer,
-    LabelSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin
@@ -50,7 +48,6 @@ logger = logging.getLogger('awx.api.views.organization')


 class InventoryUpdateEventsList(SubListAPIView):
-
     model = InventoryUpdateEvent
     serializer_class = InventoryUpdateEventSerializer
     parent_model = InventoryUpdate
@@ -70,13 +67,11 @@ class InventoryUpdateEventsList(SubListAPIView):


 class InventoryList(ListCreateAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer


 class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer
@@ -86,7 +81,9 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
         # Do not allow changes to an Inventory kind.
         if kind is not None and obj.kind != kind:
-            return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
+            return Response(
+                dict(error=_('You cannot turn a regular inventory into a "smart" or "constructed" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED
+            )
         return super(InventoryDetail, self).update(request, *args, **kwargs)

     def destroy(self, request, *args, **kwargs):
@@ -101,8 +98,30 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
         return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)


+class ConstructedInventoryDetail(InventoryDetail):
+    serializer_class = ConstructedInventorySerializer
+
+
+class ConstructedInventoryList(InventoryList):
+    serializer_class = ConstructedInventorySerializer
+
+    def get_queryset(self):
+        r = super().get_queryset()
+        return r.filter(kind='constructed')
+
+
+class InventoryInputInventoriesList(SubListAttachDetachAPIView):
+    model = Inventory
+    serializer_class = InventorySerializer
+    parent_model = Inventory
+    relationship = 'input_inventories'
+
+    def is_valid_relation(self, parent, sub, created=False):
+        if sub.kind == 'constructed':
+            raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})
+
+
 class InventoryActivityStreamList(SubListAPIView):
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Inventory
@@ -117,7 +136,6 @@ class InventoryActivityStreamList(SubListAPIView):


 class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
-
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
     parent_model = Inventory
@@ -125,13 +143,11 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):


 class InventoryAccessList(ResourceAccessList):
-
     model = User  # needs to be User for AccessLists's
     parent_model = Inventory


 class InventoryObjectRolesList(SubListAPIView):
-
     model = Role
     serializer_class = RoleSerializer
     parent_model = Inventory
@@ -144,7 +160,6 @@ class InventoryObjectRolesList(SubListAPIView):


 class InventoryJobTemplateList(SubListAPIView):
-
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Inventory
@@ -157,31 +172,10 @@ class InventoryJobTemplateList(SubListAPIView):
         return qs.filter(inventory=parent)


-class InventoryLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView, SubListAPIView):
-
-    model = Label
-    serializer_class = LabelSerializer
+class InventoryLabelList(LabelSubListCreateAttachDetachView):
     parent_model = Inventory
-    relationship = 'labels'
-
-    def post(self, request, *args, **kwargs):
-        # If a label already exists in the database, attach it instead of erroring out
-        # that it already exists
-        if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
-            existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
-            if existing.exists():
-                existing = existing[0]
-                request.data['id'] = existing.id
-                del request.data['name']
-                del request.data['organization']
-        if Label.objects.filter(inventory_labels=self.kwargs['pk']).count() > 100:
-            return Response(
-                dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
-            )
-        return super(InventoryLabelList, self).post(request, *args, **kwargs)


 class InventoryCopy(CopyAPIView):
-
     model = Inventory
     copy_return_serializer_class = InventorySerializer
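Taken together, these views give constructed inventories their own list and detail endpoints plus an input_inventories sublist. A sketch of driving them from a client, assuming the list is routed at /api/v2/constructed_inventories/ and that the serializer sets kind='constructed' on create (hosts, ids, paths, and credentials here are hypothetical):

import requests

s = requests.Session()
s.auth = ('admin', 'password')  # hypothetical credentials
base = 'https://awx.example.org'

# create a constructed inventory; ConstructedInventoryList only lists kind='constructed'
ci = s.post(f'{base}/api/v2/constructed_inventories/', json={'name': 'filtered', 'organization': 1}).json()

# attach a regular inventory as an input; attaching another constructed inventory
# is rejected by InventoryInputInventoriesList.is_valid_relation
s.post(f"{base}/api/v2/inventories/{ci['id']}/input_inventories/", json={'id': 42})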
awx/api/views/labels.py (new file, 69 lines)
@@ -0,0 +1,69 @@
# AWX
from awx.api.generics import SubListCreateAttachDetachAPIView, RetrieveUpdateAPIView, ListCreateAPIView
from awx.main.models import Label
from awx.api.serializers import LabelSerializer

# Django
from django.utils.translation import gettext_lazy as _

# Django REST Framework
from rest_framework.response import Response
from rest_framework.status import HTTP_400_BAD_REQUEST


class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
    """
    For related labels lists like /api/v2/inventories/N/labels/

    We want the last label to be deleted from the database
    when the last disassociate happens.

    Subclasses need to define parent_model
    """

    model = Label
    serializer_class = LabelSerializer
    relationship = 'labels'

    def unattach(self, request, *args, **kwargs):
        (sub_id, res) = super().unattach_validate(request)
        if res:
            return res

        res = super().unattach_by_id(request, sub_id)

        obj = self.model.objects.get(id=sub_id)

        if obj.is_detached():
            obj.delete()

        return res

    def post(self, request, *args, **kwargs):
        # If a label already exists in the database, attach it instead of erroring out
        # that it already exists
        if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
            existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
            if existing.exists():
                existing = existing[0]
                request.data['id'] = existing.id
                del request.data['name']
                del request.data['organization']

        # Give a 400 error if we have attached too many labels to this object
        label_filter = self.parent_model._meta.get_field(self.relationship).remote_field.name
        if Label.objects.filter(**{label_filter: self.kwargs['pk']}).count() > 100:
            return Response(dict(msg=_(f'Maximum number of labels for {self.parent_model._meta.verbose_name_raw} reached.')), status=HTTP_400_BAD_REQUEST)

        return super().post(request, *args, **kwargs)


class LabelDetail(RetrieveUpdateAPIView):
    model = Label
    serializer_class = LabelSerializer


class LabelList(ListCreateAPIView):
    name = _("Labels")
    model = Label
    serializer_class = LabelSerializer
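A sketch of the attach/detach behavior from a client's point of view (host, ids, and credentials are hypothetical; the disassociate payload follows the usual AWX sublist convention):

import requests

auth = ('admin', 'password')  # hypothetical credentials
url = 'https://awx.example.org/api/v2/inventories/5/labels/'

# posting name+organization reuses an existing label of that name instead of 400ing
requests.post(url, auth=auth, json={'name': 'production', 'organization': 1})

# standard disassociate payload; if this was the label's last attachment,
# unattach() above deletes the label row entirely
requests.post(url, auth=auth, json={'id': 7, 'disassociate': True})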
@@ -10,13 +10,11 @@ from awx.main.models import InstanceLink, Instance


 class MeshVisualizer(APIView):
-
     name = _("Mesh Visualizer")
     permission_classes = (IsSystemAdminOrAuditor,)
     swagger_topic = "System Configuration"

     def get(self, request, format=None):
-
         data = {
             'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
             'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
@@ -5,9 +5,11 @@
 import logging

 # Django
+from django.conf import settings
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
+from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
 from rest_framework.exceptions import PermissionDenied
@@ -25,15 +27,19 @@ logger = logging.getLogger('awx.analytics')


 class MetricsView(APIView):
-
     name = _('Metrics')
     swagger_topic = 'Metrics'

     renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]

+    def initialize_request(self, request, *args, **kwargs):
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
+            self.permission_classes = (AllowAny,)
+        return super(APIView, self).initialize_request(request, *args, **kwargs)
+
     def get(self, request):
         '''Show Metrics Details'''
-        if request.user.is_superuser or request.user.is_system_auditor:
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
             metrics_to_show = ''
             if not request.query_params.get('subsystemonly', "0") == "1":
                 metrics_to_show += metrics().decode('UTF-8')
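With ALLOW_METRICS_FOR_ANONYMOUS_USERS enabled, Prometheus scrapers no longer need a superuser token. A sketch (the host is hypothetical; /api/v2/metrics/ is the route the API root advertises):

import requests

# anonymous scrape, permitted when ALLOW_METRICS_FOR_ANONYMOUS_USERS is True
body = requests.get('https://awx.example.org/api/v2/metrics/').text
print(body.splitlines()[:5])  # first few Prometheus samples

# subsystem metrics only, skipping the main metrics() payload
requests.get('https://awx.example.org/api/v2/metrics/?subsystemonly=1')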
@@ -16,7 +16,7 @@ from rest_framework import status

 from awx.main.constants import ACTIVE_STATES
 from awx.main.utils import get_object_or_400
-from awx.main.models.ha import Instance, InstanceGroup
+from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
 from awx.main.models.organization import Team
 from awx.main.models.projects import Project
 from awx.main.models.inventory import Inventory
@@ -107,6 +107,11 @@ class InstanceGroupMembershipMixin(object):
         if inst_name in ig_obj.policy_instance_list:
             ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
             ig_obj.save(update_fields=['policy_instance_list'])

+        # sometimes removing an instance has a non-obvious consequence
+        # this is almost always true if policy_instance_percentage or _minimum is non-zero
+        # after removing a single instance, the other memberships need to be re-balanced
+        schedule_policy_task()
         return response
@@ -58,7 +58,6 @@ logger = logging.getLogger('awx.api.views.organization')


 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
-
     model = Organization
     serializer_class = OrganizationSerializer
@@ -70,7 +69,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):


 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
-
     model = Organization
     serializer_class = OrganizationSerializer
@@ -106,7 +104,6 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI


 class OrganizationInventoriesList(SubListAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer
     parent_model = Organization
@@ -114,7 +111,6 @@ class OrganizationInventoriesList(SubListAPIView):


 class OrganizationUsersList(BaseUsersList):
-
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -123,7 +119,6 @@ class OrganizationUsersList(BaseUsersList):


 class OrganizationAdminsList(BaseUsersList):
-
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -132,7 +127,6 @@ class OrganizationAdminsList(BaseUsersList):


 class OrganizationProjectsList(SubListCreateAPIView):
-
     model = Project
     serializer_class = ProjectSerializer
     parent_model = Organization
@@ -140,7 +134,6 @@ class OrganizationProjectsList(SubListCreateAPIView):


 class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
-
     model = ExecutionEnvironment
     serializer_class = ExecutionEnvironmentSerializer
     parent_model = Organization
@@ -150,7 +143,6 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):


 class OrganizationJobTemplatesList(SubListCreateAPIView):
-
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Organization
@@ -158,7 +150,6 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):


 class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
-
     model = WorkflowJobTemplate
     serializer_class = WorkflowJobTemplateSerializer
     parent_model = Organization
@@ -166,7 +157,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):


 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
-
     model = Team
     serializer_class = TeamSerializer
     parent_model = Organization
@@ -175,7 +165,6 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):


 class OrganizationActivityStreamList(SubListAPIView):
-
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Organization
@@ -184,7 +173,6 @@ class OrganizationActivityStreamList(SubListAPIView):


 class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
-
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization
@@ -193,34 +181,28 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):


 class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
-
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization


 class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_started'


 class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_error'


 class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_success'


 class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_approvals'


 class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
-
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
     parent_model = Organization
@@ -228,7 +210,6 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):


 class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
-
     model = Credential
     serializer_class = CredentialSerializer
     parent_model = Organization
@@ -240,13 +221,11 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):


 class OrganizationAccessList(ResourceAccessList):
-
     model = User  # needs to be User for AccessLists's
     parent_model = Organization


 class OrganizationObjectRolesList(SubListAPIView):
-
     model = Role
     serializer_class = RoleSerializer
     parent_model = Organization
@@ -36,7 +36,6 @@ logger = logging.getLogger('awx.api.views.root')


 class ApiRootView(APIView):
-
     permission_classes = (AllowAny,)
     name = _('REST API')
     versioning_class = None
@@ -59,7 +58,6 @@ class ApiRootView(APIView):


 class ApiOAuthAuthorizationRootView(APIView):
-
     permission_classes = (AllowAny,)
     name = _("API OAuth 2 Authorization Root")
     versioning_class = None
@@ -74,7 +72,6 @@ class ApiOAuthAuthorizationRootView(APIView):


 class ApiVersionRootView(APIView):
-
     permission_classes = (AllowAny,)
     swagger_topic = 'Versioning'
@@ -101,6 +98,7 @@ class ApiVersionRootView(APIView):
         data['tokens'] = reverse('api:o_auth2_token_list', request=request)
         data['metrics'] = reverse('api:metrics_view', request=request)
         data['inventory'] = reverse('api:inventory_list', request=request)
+        data['constructed_inventory'] = reverse('api:constructed_inventory_list', request=request)
         data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
         data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
         data['groups'] = reverse('api:group_list', request=request)
@@ -172,7 +170,6 @@ class ApiV2PingView(APIView):


 class ApiV2SubscriptionView(APIView):
-
     permission_classes = (IsAuthenticated,)
     name = _('Subscriptions')
     swagger_topic = 'System Configuration'
@@ -212,7 +209,6 @@ class ApiV2SubscriptionView(APIView):


 class ApiV2AttachView(APIView):
-
     permission_classes = (IsAuthenticated,)
     name = _('Attach Subscription')
     swagger_topic = 'System Configuration'
@@ -230,7 +226,6 @@ class ApiV2AttachView(APIView):
         user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
         pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
         if pool_id and user and pw:
-
             data = request.data.copy()
             try:
                 with set_environ(**settings.AWX_TASK_ENV):
@@ -258,7 +253,6 @@ class ApiV2AttachView(APIView):


 class ApiV2ConfigView(APIView):
-
     permission_classes = (IsAuthenticated,)
     name = _('Configuration')
     swagger_topic = 'System Configuration'
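Clients can discover the new constructed inventory endpoint from the version root rather than hard-coding a path. A sketch (host and credentials hypothetical):

import requests

top = requests.get('https://awx.example.org/api/v2/', auth=('admin', 'password')).json()
print(top['constructed_inventory'])  # resolved URL of the constructed inventory list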
@@ -8,7 +8,6 @@ from django.utils.translation import gettext_lazy as _


 class ConfConfig(AppConfig):
-
     name = 'awx.conf'
     verbose_name = _('Configuration')
@@ -16,7 +15,6 @@ class ConfConfig(AppConfig):
         self.module.autodiscover()

         if not set(sys.argv) & {'migrate', 'check_migrations'}:
-
             from .settings import SettingsWrapper

             SettingsWrapper.initialize()
@@ -47,7 +47,6 @@ class IntegerField(IntegerField):


 class StringListField(ListField):
-
     child = CharField()

     def to_representation(self, value):
@@ -57,7 +56,6 @@ class StringListField(ListField):


 class StringListBooleanField(ListField):
-
     default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
     child = CharField()
@@ -96,7 +94,6 @@ class StringListBooleanField(ListField):


 class StringListPathField(StringListField):
-
     default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}

     def to_internal_value(self, paths):
@@ -126,7 +123,6 @@ class StringListIsolatedPathField(StringListField):
     }

     def to_internal_value(self, paths):
-
         if isinstance(paths, (list, tuple)):
             for p in paths:
                 if not isinstance(p, str):
@@ -8,7 +8,6 @@ import awx.main.fields


 class Migration(migrations.Migration):
-
     dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

     operations = [
@@ -48,7 +48,6 @@ def revert_tower_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]

     run_before = [('main', '0005_squashed_v310_v313_updates')]
@@ -7,7 +7,6 @@ import awx.main.fields


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0002_v310_copy_tower_settings')]

     operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]
@@ -5,7 +5,6 @@ from django.db import migrations


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0003_v310_JSONField_changes')]

     operations = [
@@ -15,7 +15,6 @@ def reverse_copy_session_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0004_v320_reencrypt')]

     operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
@@ -8,7 +8,6 @@ from django.db import migrations


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0005_v330_rename_two_session_settings')]

     operations = [migrations.RunPython(fill_ldap_group_type_params)]
@@ -9,7 +9,6 @@ def copy_allowed_ips(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0006_v331_ldap_group_type')]

     operations = [migrations.RunPython(copy_allowed_ips)]
@@ -14,7 +14,6 @@ def _noop(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0007_v380_rename_more_settings')]

     operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
@@ -10,7 +10,6 @@ def rename_proot_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0008_subscriptions')]

     operations = [migrations.RunPython(rename_proot_settings)]
@@ -10,7 +10,6 @@ __all__ = ['rename_setting']


 def rename_setting(apps, schema_editor, old_key, new_key):
-
     old_setting = None
     Setting = apps.get_model('conf', 'Setting')
     if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
@@ -17,7 +17,6 @@ __all__ = ['Setting']


 class Setting(CreatedModifiedModel):
-
     key = models.CharField(max_length=255)
     value = JSONBlob(null=True)
     user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
@@ -80,7 +80,7 @@ def _ctit_db_wrapper(trans_safe=False):
         yield
     except DBError as exc:
         if trans_safe:
-            level = logger.exception
+            level = logger.warning
             if isinstance(exc, ProgrammingError):
                 if 'relation' in str(exc) and 'does not exist' in str(exc):
                     # this generally means we can't fetch Tower configuration
@@ -89,7 +89,7 @@ def _ctit_db_wrapper(trans_safe=False):
                     # has come up *before* the database has finished migrating, and
                     # especially that the conf.settings table doesn't exist yet
                     level = logger.debug
-            level('Database settings are not available, using defaults.')
+            level(f'Database settings are not available, using defaults. error: {str(exc)}')
         else:
             logger.exception('Error modifying something related to database settings.')
     finally:
@@ -104,7 +104,6 @@ def filter_sensitive(registry, key, value):


 class TransientSetting(object):
-
     __slots__ = ('pk', 'value')

     def __init__(self, pk, value):
@@ -5,7 +5,6 @@ from awx.conf.fields import StringListBooleanField, StringListPathField, ListTup


 class TestStringListBooleanField:
-
     FIELD_VALUES = [
         ("hello", "hello"),
         (("a", "b"), ["a", "b"]),
@@ -53,7 +52,6 @@ class TestStringListBooleanField:


 class TestListTuplesField:
-
     FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]

     FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
@@ -73,7 +71,6 @@ class TestListTuplesField:


 class TestStringListPathField:
-
     FIELD_VALUES = [
         ((".", "..", "/"), [".", "..", "/"]),
         (("/home",), ["/home"]),
@@ -36,7 +36,6 @@ SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'nam


 class SettingCategoryList(ListAPIView):
-
     model = Setting  # Not exactly, but needed for the view.
     serializer_class = SettingCategorySerializer
     filter_backends = []
@@ -58,7 +57,6 @@ class SettingCategoryList(ListAPIView):


 class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
-
     model = Setting  # Not exactly, but needed for the view.
     serializer_class = SettingSingletonSerializer
     filter_backends = []
@@ -146,7 +144,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):


 class SettingLoggingTest(GenericAPIView):
-
     name = _('Logging Connectivity Test')
     model = Setting
     serializer_class = SettingSingletonSerializer
@@ -6237,4 +6237,5 @@ msgstr "%s se está actualizando."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Esta página se actualizará cuando se complete."
+
@@ -721,7 +721,7 @@ msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DT
 #: awx/api/serializers.py:4657
 msgid ""
 "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ."
-msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."
+msgstr "DTSTART ne peut correspondre à une date-heure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."

 #: awx/api/serializers.py:4659
 msgid "Multiple DTSTART is not supported."
@@ -6239,4 +6239,5 @@ msgstr "%s est en cours de mise à niveau."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Cette page sera rafraîchie une fois terminée."
+
@@ -6237,4 +6237,5 @@ msgstr "Er wordt momenteel een upgrade van%s geïnstalleerd."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Deze pagina wordt vernieuwd als hij klaar is."
+
@@ -12,7 +12,7 @@ from django.conf import settings
|
|||||||
from django.db.models import Q, Prefetch
|
from django.db.models import Q, Prefetch
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.core.exceptions import ObjectDoesNotExist
|
from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
|
||||||
|
|
||||||
# Django REST Framework
|
# Django REST Framework
|
||||||
from rest_framework.exceptions import ParseError, PermissionDenied
|
from rest_framework.exceptions import ParseError, PermissionDenied
|
||||||
@@ -281,13 +281,23 @@ class BaseAccess(object):
|
|||||||
"""
|
"""
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def assure_relationship_exists(self, obj, relationship):
|
||||||
|
if '.' in relationship:
|
||||||
|
return # not attempting validation for complex relationships now
|
||||||
|
try:
|
||||||
|
obj._meta.get_field(relationship)
|
||||||
|
except FieldDoesNotExist:
|
||||||
|
raise NotImplementedError(f'The relationship {relationship} does not exist for model {type(obj)}')
|
||||||
|
|
||||||
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
|
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
|
||||||
|
self.assure_relationship_exists(obj, relationship)
|
||||||
if skip_sub_obj_read_check:
|
if skip_sub_obj_read_check:
|
||||||
return self.can_change(obj, None)
|
return self.can_change(obj, None)
|
||||||
else:
|
else:
|
||||||
return bool(self.can_change(obj, None) and self.user.can_access(type(sub_obj), 'read', sub_obj))
|
return bool(self.can_change(obj, None) and self.user.can_access(type(sub_obj), 'read', sub_obj))
|
||||||
|
|
||||||
def can_unattach(self, obj, sub_obj, relationship, data=None):
|
def can_unattach(self, obj, sub_obj, relationship, data=None):
|
||||||
|
self.assure_relationship_exists(obj, relationship)
|
||||||
return self.can_change(obj, data)
|
return self.can_change(obj, data)
|
||||||
|
|
||||||
def check_related(self, field, Model, data, role_field='admin_role', obj=None, mandatory=False):
|
def check_related(self, field, Model, data, role_field='admin_role', obj=None, mandatory=False):
|
||||||
@@ -328,6 +338,8 @@ class BaseAccess(object):
|
|||||||
role = getattr(resource, role_field, None)
|
role = getattr(resource, role_field, None)
|
||||||
if role is None:
|
if role is None:
|
||||||
# Handle special case where resource does not have direct roles
|
# Handle special case where resource does not have direct roles
|
||||||
|
if role_field == 'read_role':
|
||||||
|
return self.user.can_access(type(resource), 'read', resource)
|
||||||
access_method_type = {'admin_role': 'change', 'execute_role': 'start'}[role_field]
|
access_method_type = {'admin_role': 'change', 'execute_role': 'start'}[role_field]
|
||||||
return self.user.can_access(type(resource), access_method_type, resource, None)
|
return self.user.can_access(type(resource), access_method_type, resource, None)
|
||||||
return self.user in role
|
return self.user in role
|
||||||
@@ -499,6 +511,21 @@ class BaseAccess(object):
         return False
 
 
+class UnifiedCredentialsMixin(BaseAccess):
+    """
+    The credentials many-to-many is a standard relationship for JT, jobs, and others
+    Permission to attach is always use permission, and permission to unattach is admin to the parent object
+    """
+
+    @check_superuser
+    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
+        if relationship == 'credentials':
+            if not isinstance(sub_obj, Credential):
+                raise RuntimeError(f'Can only attach credentials to credentials relationship, got {type(sub_obj)}')
+            return self.can_change(obj, None) and (self.user in sub_obj.use_role)
+        return super().can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
+
+
 class NotificationAttachMixin(BaseAccess):
     """For models that can have notifications attached
 
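`UnifiedCredentialsMixin` works through Python's cooperative MRO: access classes list it ahead of `BaseAccess`, so its `can_attach()` intercepts only the `'credentials'` relationship and defers everything else. A minimal, pure-Python sketch of that dispatch (class and return values simplified):

```python
class Base:
    def can_attach(self, obj, sub_obj, relationship):
        return 'generic check'

class CredentialsMixin(Base):
    def can_attach(self, obj, sub_obj, relationship):
        if relationship == 'credentials':
            return 'use-role check'
        return super().can_attach(obj, sub_obj, relationship)  # fall through

class SomeAccess(CredentialsMixin, Base):
    pass

access = SomeAccess()
print(access.can_attach(None, None, 'credentials'))      # use-role check
print(access.can_attach(None, None, 'instance_groups'))  # generic check
```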
@@ -534,7 +561,6 @@ class NotificationAttachMixin(BaseAccess):
 
 
 class InstanceAccess(BaseAccess):
-
     model = Instance
     prefetch_related = ('rampart_groups',)
 
@@ -552,7 +578,7 @@ class InstanceAccess(BaseAccess):
         return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, relationship, data=data)
 
     def can_add(self, data):
-        return False
+        return self.user.is_superuser
 
     def can_change(self, obj, data):
         return False
@@ -562,7 +588,6 @@ class InstanceAccess(BaseAccess):
 
 
 class InstanceGroupAccess(BaseAccess):
-
     model = InstanceGroup
     prefetch_related = ('instances',)
 
@@ -965,9 +990,6 @@ class HostAccess(BaseAccess):
         if data and 'name' in data:
             self.check_license(add_host_name=data['name'])
 
-            # Check the per-org limit
-            self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])
-
         # Checks for admin or change permission on inventory, controls whether
         # the user can edit variable data.
         return obj and self.user in obj.inventory.admin_role
@@ -1005,7 +1027,9 @@ class GroupAccess(BaseAccess):
         return Group.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
 
     def can_add(self, data):
-        if not data or 'inventory' not in data:
+        if not data:  # So the browseable API will work
+            return Inventory.accessible_objects(self.user, 'admin_role').exists()
+        if 'inventory' not in data:
             return False
         # Checks for admin or change permission on inventory.
         return self.check_related('inventory', Inventory, data)
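`GroupAccess.can_add()` now follows the convention already used elsewhere in this file (see the `WorkflowJobTemplateAccess` hunk further down): with no POST payload to inspect, DRF's browsable API only needs a yes/no on whether a create form should render at all. A hedged sketch of the two-phase shape — the helper names here are hypothetical stand-ins for the role queries:

```python
# Sketch only: the two-phase can_add convention for the browsable API.
def can_add(user, data):
    if not data:  # browsable API rendering a form, not an actual create
        return user_admins_any_inventory(user)  # hypothetical helper: a cheap .exists() query
    if 'inventory' not in data:
        return False
    return user_admins_inventory(user, data['inventory'])  # hypothetical helper
```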
@@ -1031,7 +1055,7 @@ class GroupAccess(BaseAccess):
         return bool(obj and self.user in obj.inventory.admin_role)
 
 
-class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
+class InventorySourceAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
     """
     I can see inventory sources whenever I can see their inventory.
     I can change inventory sources whenever I can change their inventory.
@@ -1075,18 +1099,6 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
             return self.user in obj.inventory.update_role
         return False
 
-    @check_superuser
-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return obj and obj.inventory and self.user in obj.inventory.admin_role and self.user in sub_obj.use_role
-        return super(InventorySourceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-
-    @check_superuser
-    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return obj and obj.inventory and self.user in obj.inventory.admin_role
-        return super(InventorySourceAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
-
 
 class InventoryUpdateAccess(BaseAccess):
     """
@@ -1485,7 +1497,7 @@ class ProjectUpdateAccess(BaseAccess):
         return obj and self.user in obj.project.admin_role
 
 
-class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
+class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
     """
     I can see job templates when:
     - I have read role for the job template.
@@ -1549,8 +1561,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
             if self.user not in inventory.use_role:
                 return False
 
-        ee = get_value(ExecutionEnvironment, 'execution_environment')
-        if ee and not self.user.can_access(ExecutionEnvironment, 'read', ee):
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
             return False
 
         project = get_value(Project, 'project')
@@ -1600,10 +1611,8 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if self.changes_are_non_sensitive(obj, data):
             return True
 
-        if data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role'):
+            return False
 
         for required_field, cls in (('inventory', Inventory), ('project', Project)):
             is_mandatory = True
@@ -1667,17 +1676,13 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
             if not obj.organization:
                 return False
             return self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return self.user in obj.admin_role and self.user in sub_obj.use_role
         return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
 
     @check_superuser
     def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
         if relationship == "instance_groups":
             return self.can_attach(obj, sub_obj, relationship, *args, **kwargs)
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return self.user in obj.admin_role
-        return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
+        return super(JobTemplateAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)
 
 
 class JobAccess(BaseAccess):
@@ -1824,7 +1829,7 @@ class SystemJobAccess(BaseAccess):
         return False  # no relaunching of system jobs
 
 
-class JobLaunchConfigAccess(BaseAccess):
+class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     Launch configs must have permissions checked for
     - relaunching
@@ -1832,63 +1837,69 @@
 
     In order to create a new object with a copy of this launch config, I need:
      - use access to related inventory (if present)
+     - read access to Execution Environment (if present), unless the specified ee is already in the template
      - use role to many-related credentials (if any present)
+     - read access to many-related labels (if any present), unless the specified label is already in the template
+     - read access to many-related instance groups (if any present), unless the specified instance group is already in the template
     """
 
     model = JobLaunchConfig
    select_related = 'job'
     prefetch_related = ('credentials', 'inventory')
 
-    def _unusable_creds_exist(self, qs):
-        return qs.exclude(pk__in=Credential._accessible_pk_qs(Credential, self.user, 'use_role')).exists()
+    M2M_CHECKS = {'credentials': Credential, 'labels': Label, 'instance_groups': InstanceGroup}
 
-    def has_credentials_access(self, obj):
-        # user has access if no related credentials exist that the user lacks use role for
-        return not self._unusable_creds_exist(obj.credentials)
+    def _related_filtered_queryset(self, cls):
+        if cls is Label:
+            return LabelAccess(self.user).filtered_queryset()
+        elif cls is InstanceGroup:
+            return InstanceGroupAccess(self.user).filtered_queryset()
+        else:
+            return cls._accessible_pk_qs(cls, self.user, 'use_role')
+
+    def has_obj_m2m_access(self, obj):
+        for relationship, cls in self.M2M_CHECKS.items():
+            if getattr(obj, relationship).exclude(pk__in=self._related_filtered_queryset(cls)).exists():
+                return False
+        return True
 
     @check_superuser
     def can_add(self, data, template=None):
         # This is a special case, we don't check related many-to-many elsewhere
         # launch RBAC checks use this
-        if 'credentials' in data and data['credentials'] or 'reference_obj' in data:
-            if 'reference_obj' in data:
-                prompted_cred_qs = data['reference_obj'].credentials.all()
-            else:
-                # If given model objects, only use the primary key from them
-                cred_pks = [cred.pk for cred in data['credentials']]
-                if template:
-                    for cred in template.credentials.all():
-                        if cred.pk in cred_pks:
-                            cred_pks.remove(cred.pk)
-                prompted_cred_qs = Credential.objects.filter(pk__in=cred_pks)
-            if self._unusable_creds_exist(prompted_cred_qs):
+        if 'reference_obj' in data:
+            if not self.has_obj_m2m_access(data['reference_obj']):
                 return False
-        return self.check_related('inventory', Inventory, data, role_field='use_role')
+        else:
+            for relationship, cls in self.M2M_CHECKS.items():
+                if relationship in data and data[relationship]:
+                    # If given model objects, only use the primary key from them
+                    sub_obj_pks = [sub_obj.pk for sub_obj in data[relationship]]
+                    if template:
+                        for sub_obj in getattr(template, relationship).all():
+                            if sub_obj.pk in sub_obj_pks:
+                                sub_obj_pks.remove(sub_obj.pk)
+                    if cls.objects.filter(pk__in=sub_obj_pks).exclude(pk__in=self._related_filtered_queryset(cls)).exists():
+                        return False
+        return self.check_related('inventory', Inventory, data, role_field='use_role') and self.check_related(
+            'execution_environment', ExecutionEnvironment, data, role_field='read_role'
+        )
 
     @check_superuser
     def can_use(self, obj):
-        return self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True) and self.has_credentials_access(obj)
+        return (
+            self.has_obj_m2m_access(obj)
+            and self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True)
+            and self.check_related('execution_environment', ExecutionEnvironment, {}, obj=obj, role_field='read_role')
+        )
 
     def can_change(self, obj, data):
-        return self.check_related('inventory', Inventory, data, obj=obj, role_field='use_role')
-
-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if isinstance(sub_obj, Credential) and relationship == 'credentials':
-            return self.user in sub_obj.use_role
-        else:
-            raise NotImplementedError('Only credentials can be attached to launch configurations.')
-
-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if isinstance(sub_obj, Credential) and relationship == 'credentials':
-            if skip_sub_obj_read_check:
-                return True
-            else:
-                return self.user in sub_obj.read_role
-        else:
-            raise NotImplementedError('Only credentials can be attached to launch configurations.')
+        return self.check_related('inventory', Inventory, data, obj=obj, role_field='use_role') and self.check_related(
+            'execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role'
+        )
 
 
-class WorkflowJobTemplateNodeAccess(BaseAccess):
+class WorkflowJobTemplateNodeAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     I can see/use a WorkflowJobTemplateNode if I have read permission
     to associated Workflow Job Template
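The `M2M_CHECKS` table turns three hand-written, credential-only checks into one data-driven loop over every prompted many-to-many relationship. A pure-Python sketch of the shape — the role names and the `has_role` helper are illustrative, where the real code expresses the same test as a queryset `.exclude(...).exists()` per relationship:

```python
M2M_CHECKS = {'credentials': 'use_role', 'labels': 'read_role', 'instance_groups': 'read_role'}

def has_obj_m2m_access(user, obj):
    # Deny if any related object exists that the user lacks the required role on.
    for relationship, required_role in M2M_CHECKS.items():
        for sub_obj in getattr(obj, relationship, []):
            if not user.has_role(sub_obj, required_role):  # illustrative API
                return False
    return True
```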
@@ -1911,7 +1922,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
     """
 
     model = WorkflowJobTemplateNode
-    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'credentials', 'workflow_job_template')
+    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'workflow_job_template')
 
     def filtered_queryset(self):
         return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.accessible_objects(self.user, 'read_role'))
@@ -1923,7 +1934,8 @@
         return (
             self.check_related('workflow_job_template', WorkflowJobTemplate, data, mandatory=True)
             and self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role')
-            and JobLaunchConfigAccess(self.user).can_add(data)
+            and self.check_related('inventory', Inventory, data, role_field='use_role')
+            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
         )
 
     def wfjt_admin(self, obj):
@@ -1932,17 +1944,14 @@
         else:
             return self.user in obj.workflow_job_template.admin_role
 
-    def ujt_execute(self, obj):
+    def ujt_execute(self, obj, data=None):
         if not obj.unified_job_template:
             return True
-        return self.check_related('unified_job_template', UnifiedJobTemplate, {}, obj=obj, role_field='execute_role', mandatory=True)
+        return self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True)
 
     def can_change(self, obj, data):
-        if not data:
-            return True
-
         # should not be able to edit the prompts if lacking access to UJT or WFJT
-        return self.ujt_execute(obj) and self.wfjt_admin(obj) and JobLaunchConfigAccess(self.user).can_change(obj, data)
+        return self.ujt_execute(obj, data=data) and self.wfjt_admin(obj) and JobLaunchConfigAccess(self.user).can_change(obj, data)
 
     def can_delete(self, obj):
         return self.wfjt_admin(obj)
@@ -1955,29 +1964,14 @@
         return True
 
     def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if not self.wfjt_admin(obj):
-            return False
-        if relationship == 'credentials':
-            # Need permission to related template to attach a credential
-            if not self.ujt_execute(obj):
-                return False
-            return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-        elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
-            return self.check_same_WFJT(obj, sub_obj)
-        else:
-            raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
+        if relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
+            return self.wfjt_admin(obj) and self.check_same_WFJT(obj, sub_obj)
+        return super().can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
 
-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if not self.wfjt_admin(obj):
-            return False
-        if relationship == 'credentials':
-            if not self.ujt_execute(obj):
-                return False
-            return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-        elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
-            return self.check_same_WFJT(obj, sub_obj)
-        else:
-            raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
+    def can_unattach(self, obj, sub_obj, relationship, data=None):
+        if relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
+            return self.wfjt_admin(obj)
+        return super().can_unattach(obj, sub_obj, relationship, data=None)
 
 
 class WorkflowJobNodeAccess(BaseAccess):
@@ -2052,13 +2046,10 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if not data:  # So the browseable API will work
            return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
 
-        if data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
-
-        return self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and self.check_related(
-            'inventory', Inventory, data, role_field='use_role'
+        return bool(
+            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
+            and self.check_related('inventory', Inventory, data, role_field='use_role')
+            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
         )
 
     def can_copy(self, obj):
@@ -2104,14 +2095,10 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if self.user.is_superuser:
             return True
 
-        if data and data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
-
         return (
             self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj)
             and self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj)
+            and self.check_related('execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role')
            and self.user in obj.admin_role
         )
 
@@ -2364,7 +2351,6 @@ class JobEventAccess(BaseAccess):
 
 
 class UnpartitionedJobEventAccess(JobEventAccess):
-
     model = UnpartitionedJobEvent
 
 
@@ -2518,7 +2504,7 @@ class UnifiedJobAccess(BaseAccess):
         return super(UnifiedJobAccess, self).get_queryset().filter(workflowapproval__isnull=True)
 
 
-class ScheduleAccess(BaseAccess):
+class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     I can see a schedule if I can see it's related unified job, I can create them or update them if I have write access
     """
@@ -2559,12 +2545,6 @@
     def can_delete(self, obj):
         return self.can_change(obj, {})
 
-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-
-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-
 
 class NotificationTemplateAccess(BaseAccess):
     """
@@ -2715,46 +2695,66 @@ class ActivityStreamAccess(BaseAccess):
         # 'job_template', 'job', 'project', 'project_update', 'workflow_job',
         # 'inventory_source', 'workflow_job_template'
 
-        inventory_set = Inventory.accessible_objects(self.user, 'read_role')
-        credential_set = Credential.accessible_objects(self.user, 'read_role')
+        q = Q(user=self.user)
+        inventory_set = Inventory.accessible_pk_qs(self.user, 'read_role')
+        if inventory_set:
+            q |= (
+                Q(ad_hoc_command__inventory__in=inventory_set)
+                | Q(inventory__in=inventory_set)
+                | Q(host__inventory__in=inventory_set)
+                | Q(group__inventory__in=inventory_set)
+                | Q(inventory_source__inventory__in=inventory_set)
+                | Q(inventory_update__inventory_source__inventory__in=inventory_set)
+            )
+
+        credential_set = Credential.accessible_pk_qs(self.user, 'read_role')
+        if credential_set:
+            q |= Q(credential__in=credential_set)
+
         auditing_orgs = (
             (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
             .distinct()
             .values_list('id', flat=True)
         )
-        project_set = Project.accessible_objects(self.user, 'read_role')
-        jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
-        team_set = Team.accessible_objects(self.user, 'read_role')
-        wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
-        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
-        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
+        if auditing_orgs:
+            q |= (
+                Q(user__in=auditing_orgs.values('member_role__members'))
+                | Q(organization__in=auditing_orgs)
+                | Q(notification_template__organization__in=auditing_orgs)
+                | Q(notification__notification_template__organization__in=auditing_orgs)
+                | Q(label__organization__in=auditing_orgs)
+                | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+            )
 
-        return qs.filter(
-            Q(ad_hoc_command__inventory__in=inventory_set)
-            | Q(o_auth2_application__in=app_set)
-            | Q(o_auth2_access_token__in=token_set)
-            | Q(user__in=auditing_orgs.values('member_role__members'))
-            | Q(user=self.user)
-            | Q(organization__in=auditing_orgs)
-            | Q(inventory__in=inventory_set)
-            | Q(host__inventory__in=inventory_set)
-            | Q(group__inventory__in=inventory_set)
-            | Q(inventory_source__inventory__in=inventory_set)
-            | Q(inventory_update__inventory_source__inventory__in=inventory_set)
-            | Q(credential__in=credential_set)
-            | Q(team__in=team_set)
-            | Q(project__in=project_set)
-            | Q(project_update__project__in=project_set)
-            | Q(job_template__in=jt_set)
-            | Q(job__job_template__in=jt_set)
-            | Q(workflow_job_template__in=wfjt_set)
-            | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
-            | Q(workflow_job__workflow_job_template__in=wfjt_set)
-            | Q(notification_template__organization__in=auditing_orgs)
-            | Q(notification__notification_template__organization__in=auditing_orgs)
-            | Q(label__organization__in=auditing_orgs)
-            | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
-        ).distinct()
+        project_set = Project.accessible_pk_qs(self.user, 'read_role')
+        if project_set:
+            q |= Q(project__in=project_set) | Q(project_update__project__in=project_set)
+
+        jt_set = JobTemplate.accessible_pk_qs(self.user, 'read_role')
+        if jt_set:
+            q |= Q(job_template__in=jt_set) | Q(job__job_template__in=jt_set)
+
+        wfjt_set = WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role')
+        if wfjt_set:
+            q |= (
+                Q(workflow_job_template__in=wfjt_set)
+                | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
+                | Q(workflow_job__workflow_job_template__in=wfjt_set)
+            )
+
+        team_set = Team.accessible_pk_qs(self.user, 'read_role')
+        if team_set:
+            q |= Q(team__in=team_set)
+
+        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
+        if app_set:
+            q |= Q(o_auth2_application__in=app_set)
+
+        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
+        if token_set:
+            q |= Q(o_auth2_access_token__in=token_set)
+
+        return qs.filter(q).distinct()
 
     def can_add(self, data):
         return False
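The `filtered_queryset()` rewrite above swaps one giant OR of subqueries for a `Q` object built incrementally, skipping any branch whose accessible set is empty. A minimal runnable sketch of the pattern (the model field names are placeholders):

```python
from django.db.models import Q

def visible_activity_q(user, inventory_pks, credential_pks):
    q = Q(user=user)  # everyone can see their own entries
    if inventory_pks:  # empty sets contribute no subquery at all
        q |= Q(inventory__in=inventory_pks) | Q(host__inventory__in=inventory_pks)
    if credential_pks:
        q |= Q(credential__in=credential_pks)
    return q  # caller applies qs.filter(q).distinct()
```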
--- a/awx/main/analytics/broadcast_websocket.py
+++ b/awx/main/analytics/broadcast_websocket.py
@@ -1,8 +1,8 @@
 import datetime
 import asyncio
 import logging
-import aioredis
 import redis
+import redis.asyncio
 import re
 
 from prometheus_client import (
@@ -82,7 +82,7 @@ class BroadcastWebsocketStatsManager:
 
     async def run_loop(self):
         try:
-            redis_conn = await aioredis.create_redis_pool(settings.BROKER_URL)
+            redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
             while True:
                 stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
                 await redis_conn.set(self._redis_key, stats_data_str)
@@ -122,8 +122,8 @@ class BroadcastWebsocketStats:
             'Number of messages received, to be forwarded, by the broadcast websocket system',
             registry=self._registry,
         )
-        self._messages_received = Gauge(
-            f'awx_{self.remote_name}_messages_received',
+        self._messages_received_current_conn = Gauge(
+            f'awx_{self.remote_name}_messages_received_currrent_conn',
             'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
             registry=self._registry,
         )
@@ -144,13 +144,13 @@ class BroadcastWebsocketStats:
 
     def record_message_received(self):
         self._internal_messages_received_per_minute.record()
-        self._messages_received.inc()
+        self._messages_received_current_conn.inc()
         self._messages_received_total.inc()
 
     def record_connection_established(self):
         self._connection.state('connected')
         self._connection_start.set_to_current_time()
-        self._messages_received.set(0)
+        self._messages_received_current_conn.set(0)
 
     def record_connection_lost(self):
         self._connection.state('disconnected')
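redis-py ships an asyncio client since 4.2.0, so the separate aioredis dependency can be dropped. A hedged sketch of the equivalent set/get round trip — the URL is an assumption standing in for `settings.BROKER_URL`:

```python
import asyncio
import redis.asyncio

async def main():
    conn = redis.asyncio.Redis.from_url('redis://localhost:6379/0')  # assumed local broker
    await conn.set('example_stats_key', 'serialized-stats')
    print(await conn.get('example_stats_key'))
    await conn.close()

asyncio.run(main())
```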
--- a/awx/main/analytics/collectors.py
+++ b/awx/main/analytics/collectors.py
@@ -16,6 +16,7 @@ from awx.conf.license import get_license
 from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook
 from awx.main import models
 from awx.main.analytics import register
+from awx.main.scheduler.task_manager_models import TaskManagerModels
 
 """
 This module is used to define metrics collected by awx.main.analytics.gather()
@@ -235,25 +236,25 @@ def projects_by_scm_type(since, **kwargs):
 @register('instance_info', '1.2', description=_('Cluster topology and capacity'))
 def instance_info(since, include_hostnames=False, **kwargs):
     info = {}
-    instances = models.Instance.objects.values_list('hostname').values(
-        'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'enabled'
-    )
-    for instance in instances:
-        consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
+    # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
+    tm_models = TaskManagerModels.init_with_consumed_capacity(instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
+    for tm_instance in tm_models.instances.instances_by_hostname.values():
+        instance = tm_instance.obj
         instance_info = {
-            'uuid': instance['uuid'],
-            'version': instance['version'],
-            'capacity': instance['capacity'],
-            'cpu': instance['cpu'],
-            'memory': instance['memory'],
-            'managed_by_policy': instance['managed_by_policy'],
-            'enabled': instance['enabled'],
-            'consumed_capacity': consumed_capacity,
-            'remaining_capacity': instance['capacity'] - consumed_capacity,
+            'uuid': instance.uuid,
+            'version': instance.version,
+            'capacity': instance.capacity,
+            'cpu': instance.cpu,
+            'memory': instance.memory,
+            'managed_by_policy': instance.managed_by_policy,
+            'enabled': instance.enabled,
+            'consumed_capacity': tm_instance.consumed_capacity,
+            'remaining_capacity': instance.capacity - tm_instance.consumed_capacity,
+            'node_type': instance.node_type,
         }
         if include_hostnames is True:
-            instance_info['hostname'] = instance['hostname']
-        info[instance['uuid']] = instance_info
+            instance_info['hostname'] = instance.hostname
+        info[instance.uuid] = instance_info
     return info
 
 
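The collector change above replaces a per-instance `UnifiedJob` query (an N+1 pattern) with one shared capacity computation. The aggregation it avoids repeating looks like this in miniature:

```python
from collections import defaultdict

def consumed_by_node(running_tasks):
    """running_tasks: iterable of (execution_node, task_impact) pairs."""
    totals = defaultdict(int)
    for node, impact in running_tasks:
        totals[node] += impact  # one pass over all tasks, shared by every node
    return totals

print(consumed_by_node([('node1', 5), ('node2', 3), ('node1', 2)]))
# defaultdict(<class 'int'>, {'node1': 7, 'node2': 3})
```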
--- a/awx/main/analytics/metrics.py
+++ b/awx/main/analytics/metrics.py
@@ -3,6 +3,7 @@ from prometheus_client import CollectorRegistry, Gauge, Info, generate_latest
 
 from awx.conf.license import get_license
 from awx.main.utils import get_awx_version
+from awx.main.models import UnifiedJob
 from awx.main.analytics.collectors import (
     counts,
     instance_info,
@@ -56,6 +57,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -83,6 +85,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -110,6 +113,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -119,6 +123,7 @@ def metrics():
         [
             'hostname',
             'instance_uuid',
+            'node_type',
         ],
         registry=REGISTRY,
     )
@@ -169,8 +174,9 @@ def metrics():
 
     all_job_data = job_counts(None)
     statuses = all_job_data.get('status', {})
-    for status, value in statuses.items():
-        STATUS.labels(status=status).set(value)
+    states = set(dict(UnifiedJob.STATUS_CHOICES).keys()) - set(['new'])
+    for state in states:
+        STATUS.labels(status=state).set(statuses.get(state, 0))
 
     RUNNING_JOBS.set(current_counts['running_jobs'])
     PENDING_JOBS.set(current_counts['pending_jobs'])
@@ -178,12 +184,13 @@ def metrics():
     instance_data = instance_info(None, include_hostnames=True)
     for uuid, info in instance_data.items():
         hostname = info['hostname']
-        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
+        node_type = info['node_type']
+        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['capacity'])
         INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
         INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
-        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
-        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
-        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
+        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['consumed_capacity'])
+        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['remaining_capacity'])
+        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).info(
             {
                 'enabled': str(instance_data[uuid]['enabled']),
                 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
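Once `node_type` is declared in a metric's label list, every `.labels()` call must supply it — which is why the capacity, consumed, remaining, and info call sites all change together above. A self-contained sketch (the metric name and values are made up):

```python
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()
capacity = Gauge(
    'awx_instance_capacity_example',
    'Capacity of each node',
    ['hostname', 'instance_uuid', 'node_type'],  # all three labels now required
    registry=registry,
)
capacity.labels(hostname='node1', instance_uuid='abc-123', node_type='hybrid').set(57)
print(generate_latest(registry).decode())
```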
--- a/awx/main/analytics/subsystem_metrics.py
+++ b/awx/main/analytics/subsystem_metrics.py
@@ -5,7 +5,9 @@ import logging
 
 from django.conf import settings
 from django.apps import apps
 
 from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing
+
 root_key = 'awx_metrics'
 logger = logging.getLogger('awx.main.analytics')
@@ -163,10 +165,10 @@ class Metrics:
         Instance = apps.get_model('main', 'Instance')
         if instance_name:
             self.instance_name = instance_name
-        elif settings.IS_TESTING():
+        elif is_testing():
             self.instance_name = "awx_testing"
         else:
-            self.instance_name = Instance.objects.me().hostname
+            self.instance_name = Instance.objects.my_hostname()
 
         # metric name, help_text
         METRICSLIST = [
@@ -184,19 +186,29 @@ class Metrics:
             FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'),
             IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'),
             FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'),
-            SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading all tasks from db'),
+            SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading tasks from db'),
             SetFloatM('task_manager_start_task_seconds', 'Time spent starting task'),
             SetFloatM('task_manager_process_running_tasks_seconds', 'Time spent processing running tasks'),
             SetFloatM('task_manager_process_pending_tasks_seconds', 'Time spent processing pending tasks'),
-            SetFloatM('task_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
-            SetFloatM('task_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow jobs'),
             SetFloatM('task_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
-            IntM('task_manager_schedule_calls', 'Number of calls to task manager schedule'),
+            IntM('task_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
             SetFloatM('task_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
             SetIntM('task_manager_tasks_started', 'Number of tasks started'),
             SetIntM('task_manager_running_processed', 'Number of running tasks processed'),
             SetIntM('task_manager_pending_processed', 'Number of pending tasks processed'),
             SetIntM('task_manager_tasks_blocked', 'Number of tasks blocked from running'),
+            SetFloatM('task_manager_commit_seconds', 'Time spent in db transaction, including on_commit calls'),
+            SetFloatM('dependency_manager_get_tasks_seconds', 'Time spent loading pending tasks from db'),
+            SetFloatM('dependency_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
+            SetFloatM('dependency_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+            IntM('dependency_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+            SetFloatM('dependency_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+            SetIntM('dependency_manager_pending_processed', 'Number of pending tasks processed'),
+            SetFloatM('workflow_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+            IntM('workflow_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+            SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+            SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
+            SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
         ]
         # turn metric list into dictionary with the metric name as a key
         self.METRICS = {}
@@ -303,7 +315,12 @@ class Metrics:
             self.previous_send_metrics.set(current_time)
             self.previous_send_metrics.store_value(self.conn)
         finally:
-            lock.release()
+            try:
+                lock.release()
+            except Exception as exc:
+                # After system failures, we might throw redis.exceptions.LockNotOwnedError
+                # this is to avoid print a Traceback, and importantly, avoid raising an exception into parent context
+                logger.warning(f'Error releasing subsystem metrics redis lock, error: {str(exc)}')
 
     def load_other_metrics(self, request):
         # data received from other nodes are stored in their own keys
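The `finally`-block hardening guards against `redis.exceptions.LockNotOwnedError` when a lock expired or changed hands during a failure. The defensive shape, extracted as a sketch:

```python
import logging

logger = logging.getLogger('awx.main.analytics')

def release_quietly(lock):
    try:
        lock.release()
    except Exception as exc:  # e.g. redis.exceptions.LockNotOwnedError after an expiry
        # Swallow and log: raising here would mask whatever ran in the try block.
        logger.warning(f'Error releasing subsystem metrics redis lock, error: {str(exc)}')
```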
--- a/awx/main/apps.py
+++ b/awx/main/apps.py
@@ -3,6 +3,5 @@ from django.utils.translation import gettext_lazy as _
 
 
 class MainConfig(AppConfig):
-
     name = 'awx.main'
     verbose_name = _('Main')
--- a/awx/main/conf.py
+++ b/awx/main/conf.py
@@ -446,7 +446,7 @@ register(
     label=_('Default Job Idle Timeout'),
     help_text=_(
         'If no output is detected from ansible in this number of seconds the execution will be terminated. '
-        'Use value of 0 to used default idle_timeout is 600s.'
+        'Use value of 0 to indicate that no idle timeout should be imposed.'
     ),
     category=_('Jobs'),
     category_slug='jobs',
@@ -569,7 +569,7 @@ register(
 register(
     'LOG_AGGREGATOR_LOGGERS',
     field_class=fields.StringListField,
-    default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
+    default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket'],
     label=_('Loggers Sending Data to Log Aggregator Form'),
     help_text=_(
         'List of loggers that will send HTTP logs to the collector, these can '
@@ -577,7 +577,8 @@ register(
         'awx - service logs\n'
         'activity_stream - activity stream records\n'
         'job_events - callback data from Ansible job events\n'
-        'system_tracking - facts gathered from scan jobs.'
+        'system_tracking - facts gathered from scan jobs\n'
+        'broadcast_websocket - errors pertaining to websockets broadcast metrics\n'
     ),
     category=_('Logging'),
     category_slug='logging',
|||||||
@@ -9,10 +9,16 @@ aim_inputs = {
|
|||||||
'fields': [
|
'fields': [
|
||||||
{
|
{
|
||||||
'id': 'url',
|
'id': 'url',
|
||||||
'label': _('CyberArk AIM URL'),
|
'label': _('CyberArk CCP URL'),
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
'format': 'url',
|
'format': 'url',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
'id': 'webservice_id',
|
||||||
|
'label': _('Web Service ID'),
|
||||||
|
'type': 'string',
|
||||||
|
'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
|
||||||
|
},
|
||||||
{
|
{
|
||||||
'id': 'app_id',
|
'id': 'app_id',
|
||||||
'label': _('Application ID'),
|
'label': _('Application ID'),
|
||||||
@@ -64,10 +70,13 @@ def aim_backend(**kwargs):
|
|||||||
client_cert = kwargs.get('client_cert', None)
|
client_cert = kwargs.get('client_cert', None)
|
||||||
client_key = kwargs.get('client_key', None)
|
client_key = kwargs.get('client_key', None)
|
||||||
verify = kwargs['verify']
|
verify = kwargs['verify']
|
||||||
|
webservice_id = kwargs['webservice_id']
|
||||||
app_id = kwargs['app_id']
|
app_id = kwargs['app_id']
|
||||||
object_query = kwargs['object_query']
|
object_query = kwargs['object_query']
|
||||||
object_query_format = kwargs['object_query_format']
|
object_query_format = kwargs['object_query_format']
|
||||||
reason = kwargs.get('reason', None)
|
reason = kwargs.get('reason', None)
|
||||||
|
if webservice_id == '':
|
||||||
|
webservice_id = 'AIMWebService'
|
||||||
|
|
||||||
query_params = {
|
query_params = {
|
||||||
'AppId': app_id,
|
'AppId': app_id,
|
||||||
@@ -78,7 +87,7 @@ def aim_backend(**kwargs):
|
|||||||
query_params['reason'] = reason
|
query_params['reason'] = reason
|
||||||
|
|
||||||
request_qs = '?' + urlencode(query_params, quote_via=quote)
|
request_qs = '?' + urlencode(query_params, quote_via=quote)
|
||||||
request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))
|
request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))
|
||||||
|
|
||||||
with CertFiles(client_cert, client_key) as cert:
|
with CertFiles(client_cert, client_key) as cert:
|
||||||
res = requests.get(
|
res = requests.get(
|
||||||
@@ -92,4 +101,4 @@ def aim_backend(**kwargs):
|
|||||||
return res.json()['Content']
|
return res.json()['Content']
|
||||||
|
|
||||||
|
|
||||||
aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
|
aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
|
||||||
|
|||||||
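The new `webservice_id` input only changes the first path segment of the CCP request; blank falls back to the old hard-coded `AIMWebService`. The `urljoin` arithmetic, runnable with hypothetical values:

```python
from urllib.parse import urljoin

url = 'https://ccp.example.com'  # hypothetical CCP host
for webservice_id in ('', 'CustomAIMService'):
    effective = webservice_id or 'AIMWebService'  # blank -> legacy default
    print(urljoin(url, '/'.join([effective, 'api', 'Accounts'])))
# https://ccp.example.com/AIMWebService/api/Accounts
# https://ccp.example.com/CustomAIMService/api/Accounts
```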
--- a/awx/main/credential_plugins/conjur.py
+++ b/awx/main/credential_plugins/conjur.py
@@ -1,6 +1,5 @@
 from .plugin import CredentialPlugin, CertFiles, raise_for_status
 
-import base64
 from urllib.parse import urljoin, quote
 
 from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
     cacert = kwargs.get('cacert', None)
 
     auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain'},
+        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
         'data': api_key,
         'allow_redirects': False,
     }
@@ -69,9 +68,13 @@ def conjur_backend(**kwargs):
     with CertFiles(cacert) as cert:
         # https://www.conjur.org/api.html#authentication-authenticate-post
         auth_kwargs['verify'] = cert
-        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+        try:
+            resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+            resp.raise_for_status()
+        except requests.exceptions.HTTPError:
+            resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
     raise_for_status(resp)
-    token = base64.b64encode(resp.content).decode('utf-8')
+    token = resp.content.decode('utf-8')
 
     lookup_kwargs = {
         'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -80,14 +83,21 @@ def conjur_backend(**kwargs):
 
     # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
     path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+    path_conjurcloud = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
     if version:
-        path = '?'.join([path, version])
+        ver = "version={}".format(version)
+        path = '?'.join([path, ver])
+        path_conjurcloud = '?'.join([path_conjurcloud, ver])
 
     with CertFiles(cacert) as cert:
         lookup_kwargs['verify'] = cert
-        resp = requests.get(path, timeout=30, **lookup_kwargs)
+        try:
+            resp = requests.get(path, timeout=30, **lookup_kwargs)
+            resp.raise_for_status()
+        except requests.exceptions.HTTPError:
+            resp = requests.get(path_conjurcloud, timeout=30, **lookup_kwargs)
     raise_for_status(resp)
     return resp.text
 
 
-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
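Both `conjur_backend` changes follow the same probe-then-fallback shape: try the self-hosted endpoint first, and on an HTTP error retry with the `/api`-prefixed path that Conjur Cloud uses. Extracted as a hedged, reusable sketch:

```python
from urllib.parse import urljoin
import requests

def get_with_fallback(base_url, primary, fallback, **kwargs):
    """primary/fallback: lists of path segments; returns the first non-erroring response."""
    resp = requests.get(urljoin(base_url, '/'.join(primary)), **kwargs)
    try:
        resp.raise_for_status()
    except requests.exceptions.HTTPError:
        resp = requests.get(urljoin(base_url, '/'.join(fallback)), **kwargs)
    return resp
```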
File: awx/main/credential_plugins/hashivault.py (path inferred from content)

@@ -1,6 +1,7 @@
 import copy
 import os
 import pathlib
+import time
 from urllib.parse import urljoin

 from .plugin import CredentialPlugin, CertFiles, raise_for_status
@@ -247,7 +248,15 @@ def kv_backend(**kwargs):
     request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')
     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        response = sess.get(request_url, **request_kwargs)
+        request_retries = 0
+        while request_retries < 5:
+            response = sess.get(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if response.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(response)

     json = response.json()
@@ -289,8 +298,15 @@ def ssh_backend(**kwargs):

     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        resp = sess.post(request_url, **request_kwargs)
+        request_retries = 0
+        while request_retries < 5:
+            resp = sess.post(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if resp.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(resp)
     return resp.json()['data']['signed_key']
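
Both Vault backends now retry on HTTP 412, which Vault Enterprise performance standbys return until a write has replicated to the node serving the read. A generic sketch of the same retry shape as a reusable helper (names and the usage URL are illustrative):

```python
import time

import requests


def request_with_consistency_retry(session, method, url, max_retries=5, delay=1.0, **kwargs):
    """Retry a Vault request while it returns 412 (eventual-consistency stall).

    See https://developer.hashicorp.com/vault/docs/enterprise/consistency
    """
    response = None
    for _ in range(max_retries):
        response = session.request(method, url, **kwargs)
        if response.status_code != 412:
            break  # success or a real error; let the caller raise_for_status()
        time.sleep(delay)
    return response


# Usage sketch (hypothetical address and token):
# sess = requests.Session()
# sess.headers['Authorization'] = 'Bearer s.xxxxxxxx'
# resp = request_with_consistency_retry(sess, 'GET', 'https://vault.example.com/v1/kv/data/app')
```
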
File: awx/main/dispatch/__init__.py (path inferred from content)

@@ -4,6 +4,7 @@ import select
 from contextlib import contextmanager

 from django.conf import settings
+from django.db import connection as pg_connection


 NOT_READY = ([], [], [])
@@ -15,7 +16,6 @@ def get_local_queuename():

 class PubSub(object):
     def __init__(self, conn):
-        assert conn.autocommit, "Connection must be in autocommit mode."
         self.conn = conn

     def listen(self, channel):
@@ -31,6 +31,9 @@ class PubSub(object):
         cur.execute('SELECT pg_notify(%s, %s);', (channel, payload))

     def events(self, select_timeout=5, yield_timeouts=False):
+        if not self.conn.autocommit:
+            raise RuntimeError('Listening for events can only be done in autocommit mode')
+
         while True:
             if select.select([self.conn], [], [], select_timeout) == NOT_READY:
                 if yield_timeouts:
@@ -45,11 +48,32 @@ class PubSub(object):


 @contextmanager
-def pg_bus_conn():
-    conf = settings.DATABASES['default']
-    conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {}))
-    # Django connection.cursor().connection doesn't have autocommit=True on
-    conn.set_session(autocommit=True)
+def pg_bus_conn(new_connection=False):
+    '''
+    Any listeners probably want to establish a new database connection,
+    separate from the Django connection used for queries, because that will prevent
+    losing connection to the channel whenever a .close() happens.
+
+    Any publishers probably want to use the existing connection
+    so that messages follow postgres transaction rules
+    https://www.postgresql.org/docs/current/sql-notify.html
+    '''
+
+    if new_connection:
+        conf = settings.DATABASES['default']
+        conn = psycopg2.connect(
+            dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {})
+        )
+        # Django connection.cursor().connection doesn't have autocommit=True on by default
+        conn.set_session(autocommit=True)
+    else:
+        if pg_connection.connection is None:
+            pg_connection.connect()
+        if pg_connection.connection is None:
+            raise RuntimeError('Unexpectedly could not connect to postgres for pg_notify actions')
+        conn = pg_connection.connection
+
     pubsub = PubSub(conn)
     yield pubsub
-    conn.close()
+    if new_connection:
+        conn.close()
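
The new `new_connection` switch separates the two usage patterns the docstring describes. A hedged usage sketch (queue name is illustrative; this only runs inside an AWX process with Django settings configured):

```python
from awx.main.dispatch import pg_bus_conn


def listen_forever():
    # Listener: a dedicated connection, so Django's request-cycle close() calls
    # cannot drop the LISTEN subscription.
    with pg_bus_conn(new_connection=True) as conn:
        conn.listen('tower_broadcast_all')  # illustrative channel name
        for event in conn.events(select_timeout=5, yield_timeouts=True):
            if event is None:
                continue  # select() timed out; a chance to do housekeeping
            print(event.payload)


def publish(queue, payload):
    # Publisher: ride along on the existing Django connection so the NOTIFY
    # obeys the surrounding transaction (discarded if the transaction rolls back).
    with pg_bus_conn() as conn:
        conn.notify(queue, payload)
```
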
File: awx/main/dispatch/control.py (path inferred from content)

@@ -3,6 +3,7 @@ import uuid
 import json

 from django.conf import settings
+from django.db import connection
 import redis

 from awx.main.dispatch import get_local_queuename
@@ -13,7 +14,6 @@ logger = logging.getLogger('awx.main.dispatch')


 class Control(object):
-
     services = ('dispatcher', 'callback_receiver')
     result = None

@@ -37,18 +37,27 @@ class Control(object):
     def running(self, *args, **kwargs):
         return self.control_with_reply('running', *args, **kwargs)

+    def cancel(self, task_ids, *args, **kwargs):
+        return self.control_with_reply('cancel', *args, extra_data={'task_ids': task_ids}, **kwargs)
+
     @classmethod
     def generate_reply_queue_name(cls):
         return f"reply_to_{str(uuid.uuid4()).replace('-','_')}"

-    def control_with_reply(self, command, timeout=5):
+    def control_with_reply(self, command, timeout=5, extra_data=None):
         logger.warning('checking {} {} for {}'.format(self.service, command, self.queuename))
         reply_queue = Control.generate_reply_queue_name()
         self.result = None

+        if not connection.get_autocommit():
+            raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')
+
         with pg_bus_conn() as conn:
             conn.listen(reply_queue)
-            conn.notify(self.queuename, json.dumps({'control': command, 'reply_to': reply_queue}))
+            send_data = {'control': command, 'reply_to': reply_queue}
+            if extra_data:
+                send_data.update(extra_data)
+            conn.notify(self.queuename, json.dumps(send_data))

             for reply in conn.events(select_timeout=timeout, yield_timeouts=True):
                 if reply is None:
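
Together with the consumer-side `cancel` handler further below, this gives callers a way to terminate specific dispatched tasks by UUID and learn which ones were actually found. A usage sketch (the UUIDs are placeholders; `Control('dispatcher')` targets the local node's queue):

```python
from awx.main.dispatch.control import Control

# Ask the dispatcher on this node to SIGTERM two tasks by their dispatch UUIDs;
# the reply is the list of UUIDs the consumer actually signaled.
canceled = Control('dispatcher').cancel(
    task_ids=['aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'],
    timeout=5,
)
print(canceled)
```
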
File: awx/main/dispatch/pool.py (path inferred from content)

@@ -16,13 +16,14 @@ from queue import Full as QueueFull, Empty as QueueEmpty
 from django.conf import settings
 from django.db import connection as django_connection, connections
 from django.core.cache import cache as django_cache
+from django.utils.timezone import now as tz_now
 from django_guid import set_guid
 from jinja2 import Template
 import psutil

 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
-from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity
+from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity, log_excess_runtime

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -191,7 +192,6 @@ class PoolWorker(object):


 class StatefulPoolWorker(PoolWorker):
-
     track_managed_tasks = True


@@ -328,12 +328,16 @@ class AutoscalePool(WorkerPool):
             # Get same number as max forks based on memory, this function takes memory as bytes
             self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)

+        # add magic prime number of extra workers to ensure
+        # we have a few extra workers to run the heartbeat
+        self.max_workers += 7
+
         # max workers can't be less than min_workers
         self.max_workers = max(self.min_workers, self.max_workers)

-    def debug(self, *args, **kwargs):
-        self.cleanup()
-        return super(AutoscalePool, self).debug(*args, **kwargs)
+        # the task manager enforces settings.TASK_MANAGER_TIMEOUT on its own
+        # but if the task takes longer than the time defined here, we will force it to stop here
+        self.task_manager_timeout = settings.TASK_MANAGER_TIMEOUT + settings.TASK_MANAGER_TIMEOUT_GRACE_PERIOD

     @property
     def should_grow(self):
@@ -351,6 +355,7 @@ class AutoscalePool(WorkerPool):
     def debug_meta(self):
         return 'min={} max={}'.format(self.min_workers, self.max_workers)

+    @log_excess_runtime(logger)
     def cleanup(self):
         """
         Perform some internal account and cleanup. This is run on
@@ -359,8 +364,6 @@ class AutoscalePool(WorkerPool):
         1. Discover worker processes that exited, and recover messages they
            were handling.
         2. Clean up unnecessary, idle workers.
-        3. Check to see if the database says this node is running any tasks
-           that aren't actually running. If so, reap them.

         IMPORTANT: this function is one of the few places in the dispatcher
         (aside from setting lookups) where we talk to the database. As such,
@@ -383,6 +386,8 @@ class AutoscalePool(WorkerPool):
                         reaper.reap_job(j, 'failed')
                     except Exception:
                         logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
+                else:
+                    logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                 orphaned.extend(w.orphaned_tasks)
                 self.workers.remove(w)
             elif w.idle and len(self.workers) > self.min_workers:
@@ -401,13 +406,15 @@ class AutoscalePool(WorkerPool):
                 # the task manager to never do more work
                 current_task = w.current_task
                 if current_task and isinstance(current_task, dict):
-                    if current_task.get('task', '').endswith('tasks.run_task_manager'):
+                    endings = ['tasks.task_manager', 'tasks.dependency_manager', 'tasks.workflow_manager']
+                    current_task_name = current_task.get('task', '')
+                    if any(current_task_name.endswith(e) for e in endings):
                         if 'started' not in current_task:
                             w.managed_tasks[current_task['uuid']]['started'] = time.time()
                         age = time.time() - current_task['started']
                         w.managed_tasks[current_task['uuid']]['age'] = age
-                        if age > (60 * 5):
-                            logger.error(f'run_task_manager has held the advisory lock for >5m, sending SIGTERM to {w.pid}')  # noqa
+                        if age > self.task_manager_timeout:
+                            logger.error(f'{current_task_name} has held the advisory lock for {age}, sending SIGTERM to {w.pid}')
                             os.kill(w.pid, signal.SIGTERM)

         for m in orphaned:
@@ -417,13 +424,17 @@ class AutoscalePool(WorkerPool):
             idx = random.choice(range(len(self.workers)))
             self.write(idx, m)

-        # if the database says a job is running on this node, but it's *not*,
-        # then reap it
-        running_uuids = []
-        for worker in self.workers:
-            worker.calculate_managed_tasks()
-            running_uuids.extend(list(worker.managed_tasks.keys()))
-        reaper.reap(excluded_uuids=running_uuids)
+    def add_bind_kwargs(self, body):
+        bind_kwargs = body.pop('bind_kwargs', [])
+        body.setdefault('kwargs', {})
+        if 'dispatch_time' in bind_kwargs:
+            body['kwargs']['dispatch_time'] = tz_now().isoformat()
+        if 'worker_tasks' in bind_kwargs:
+            worker_tasks = {}
+            for worker in self.workers:
+                worker.calculate_managed_tasks()
+                worker_tasks[worker.pid] = list(worker.managed_tasks.keys())
+            body['kwargs']['worker_tasks'] = worker_tasks

     def up(self):
         if self.full:
@@ -438,9 +449,8 @@ class AutoscalePool(WorkerPool):
         if 'guid' in body:
             set_guid(body['guid'])
         try:
-            # when the cluster heartbeat occurs, clean up internally
-            if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
-                self.cleanup()
+            if isinstance(body, dict) and body.get('bind_kwargs'):
+                self.add_bind_kwargs(body)
             if self.should_grow:
                 self.up()
             # we don't care about "preferred queue" round robin distribution, just
@@ -452,6 +462,10 @@ class AutoscalePool(WorkerPool):
                     w.put(body)
                     break
             else:
+                task_name = 'unknown'
+                if isinstance(body, dict):
+                    task_name = body.get('task')
+                logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
                 return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
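
`add_bind_kwargs` runs in the parent dispatcher, so it can inject values that workers cannot compute themselves: the dispatch timestamp and the task UUIDs each pool worker currently holds. A self-contained sketch of the rewrite it performs, with plain dicts standing in for pool workers and the task name merely illustrative:

```python
from datetime import datetime, timezone


def add_bind_kwargs(body, workers):
    # Mirrors the pool hook: pop the requested names, then inject each value into kwargs.
    bind_kwargs = body.pop('bind_kwargs', [])
    body.setdefault('kwargs', {})
    if 'dispatch_time' in bind_kwargs:
        body['kwargs']['dispatch_time'] = datetime.now(timezone.utc).isoformat()
    if 'worker_tasks' in bind_kwargs:
        body['kwargs']['worker_tasks'] = {w['pid']: list(w['managed_tasks']) for w in workers}
    return body


body = {
    'task': 'awx.main.tasks.system.cluster_node_heartbeat',  # dotted path is illustrative
    'kwargs': {},
    'bind_kwargs': ['dispatch_time', 'worker_tasks'],
}
workers = [{'pid': 101, 'managed_tasks': ['aaaa-1']}, {'pid': 102, 'managed_tasks': []}]
print(add_bind_kwargs(body, workers))
```
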
File: awx/main/dispatch/publish.py (path inferred from content)

@@ -1,13 +1,13 @@
 import inspect
 import logging
-import sys
 import json
+import time
 from uuid import uuid4

-from django.conf import settings
 from django_guid import get_guid

 from . import pg_bus_conn
+from awx.main.utils import is_testing

 logger = logging.getLogger('awx.main.dispatch')

@@ -49,16 +49,23 @@ class task:
     @task(queue='tower_broadcast')
     def announce():
         print("Run this everywhere!")

+    # The special parameter bind_kwargs tells the main dispatcher process to add certain kwargs
+
+    @task(bind_kwargs=['dispatch_time'])
+    def print_time(dispatch_time=None):
+        print(f"Time I was dispatched: {dispatch_time}")
     """

-    def __init__(self, queue=None):
+    def __init__(self, queue=None, bind_kwargs=None):
         self.queue = queue
+        self.bind_kwargs = bind_kwargs

     def __call__(self, fn=None):
         queue = self.queue
+        bind_kwargs = self.bind_kwargs

         class PublisherMixin(object):

             queue = None

             @classmethod
@@ -75,14 +82,16 @@ class task:
                 msg = f'{cls.name}: Queue value required and may not be None'
                 logger.error(msg)
                 raise ValueError(msg)
-            obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name}
+            obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name, 'time_pub': time.time()}
             guid = get_guid()
             if guid:
                 obj['guid'] = guid
+            if bind_kwargs:
+                obj['bind_kwargs'] = bind_kwargs
             obj.update(**kw)
             if callable(queue):
                 queue = queue()
-            if not settings.IS_TESTING(sys.argv):
+            if not is_testing():
                 with pg_bus_conn() as conn:
                     conn.notify(queue, json.dumps(obj))
             return (obj, queue)
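
For reference, this is roughly the JSON body a decorated task now publishes to pg_notify; the keys come from the hunk above, while the UUID and dotted task path below are placeholders:

```python
import json
import time
from uuid import uuid4

# Approximate shape of the message a decorated task's publish path emits.
obj = {
    'uuid': str(uuid4()),
    'args': [],
    'kwargs': {},
    'task': 'awx.main.tasks.system.print_time',  # placeholder dotted path
    'time_pub': time.time(),           # lets the consumer measure queue latency
    'bind_kwargs': ['dispatch_time'],  # dispatcher fills kwargs['dispatch_time'] on receipt
}
payload = json.dumps(obj)
print(payload)
```
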
File: awx/main/dispatch/reaper.py (path inferred from content)

@@ -2,6 +2,7 @@ from datetime import timedelta
 import logging

 from django.db.models import Q
+from django.conf import settings
 from django.utils.timezone import now as tz_now
 from django.contrib.contenttypes.models import ContentType

@@ -15,58 +16,73 @@ def startup_reaping():
     If this particular instance is starting, then we know that any running jobs are invalid
     so we will reap those jobs as a special action here
     """
-    me = Instance.objects.me()
-    jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
+    jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
+
     job_ids = []
     for j in jobs:
         job_ids.append(j.id)
-        j.status = 'failed'
-        j.start_args = ''
-        j.job_explanation += 'Task was marked as running at system start up. The system must have not shut down properly, so it has been marked as failed.'
-        j.save(update_fields=['status', 'start_args', 'job_explanation'])
-        if hasattr(j, 'send_notification_templates'):
-            j.send_notification_templates('failed')
-        j.websocket_emit_status('failed')
+        reap_job(
+            j,
+            'failed',
+            job_explanation='Task was marked as running at system start up. The system must have not shut down properly, so it has been marked as failed.',
+        )
+
     if job_ids:
         logger.error(f'Unified jobs {job_ids} were reaped on dispatch startup')


-def reap_job(j, status):
-    if UnifiedJob.objects.get(id=j.id).status not in ('running', 'waiting'):
+def reap_job(j, status, job_explanation=None):
+    j.refresh_from_db(fields=['status', 'job_explanation'])
+    status_before = j.status
+    if status_before not in ('running', 'waiting'):
         # just in case, don't reap jobs that aren't running
         return
     j.status = status
     j.start_args = ''  # blank field to remove encrypted passwords
-    j.job_explanation += ' '.join(
-        (
-            'Task was marked as running but was not present in',
-            'the job queue, so it has been marked as failed.',
-        )
-    )
+    if j.job_explanation:
+        j.job_explanation += ' '  # Separate messages for readability
+    if job_explanation is None:
+        j.job_explanation += 'Task was marked as running but was not present in the job queue, so it has been marked as failed.'
+    else:
+        j.job_explanation += job_explanation
     j.save(update_fields=['status', 'start_args', 'job_explanation'])
     if hasattr(j, 'send_notification_templates'):
         j.send_notification_templates('failed')
     j.websocket_emit_status(status)
-    logger.error('{} is no longer running; reaping'.format(j.log_format))
+    logger.error(f'{j.log_format} is no longer {status_before}; reaping')


-def reap(instance=None, status='failed', excluded_uuids=[]):
+def reap_waiting(instance=None, status='failed', job_explanation=None, grace_period=None, excluded_uuids=None, ref_time=None):
     """
-    Reap all jobs in waiting|running for this instance.
+    Reap all jobs in waiting for this instance.
     """
-    me = instance
-    if me is None:
-        try:
-            me = Instance.objects.me()
-        except RuntimeError as e:
-            logger.warning(f'Local instance is not registered, not running reaper: {e}')
-            return
-    now = tz_now()
+    if grace_period is None:
+        grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT
+
+    if instance is None:
+        hostname = Instance.objects.my_hostname()
+    else:
+        hostname = instance.hostname
+    if ref_time is None:
+        ref_time = tz_now()
+    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
+    if excluded_uuids:
+        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
+    for j in jobs:
+        reap_job(j, status, job_explanation=job_explanation)
+
+
+def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None):
+    """
+    Reap all jobs in running for this instance.
+    """
+    if instance is None:
+        hostname = Instance.objects.my_hostname()
+    else:
+        hostname = instance.hostname
     workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
     jobs = UnifiedJob.objects.filter(
-        (Q(status='running') | Q(status='waiting', modified__lte=now - timedelta(seconds=60)))
-        & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname))
-        & ~Q(polymorphic_ctype_id=workflow_ctype_id)
-    ).exclude(celery_task_id__in=excluded_uuids)
+        Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
+    )
+    if excluded_uuids:
+        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
     for j in jobs:
-        reap_job(j, status)
+        reap_job(j, status, job_explanation=job_explanation)
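
The new `reap_waiting` only considers jobs whose `modified` timestamp is older than a grace window, so a job legitimately mid-handoff from waiting to running is left alone. A small sketch of the cutoff arithmetic (the two settings values here are illustrative; the real ones come from Django settings):

```python
from datetime import datetime, timedelta, timezone

JOB_WAITING_GRACE_PERIOD = 60  # illustrative
TASK_MANAGER_TIMEOUT = 300     # illustrative

grace_period = JOB_WAITING_GRACE_PERIOD + TASK_MANAGER_TIMEOUT  # 360 seconds
ref_time = datetime.now(timezone.utc)
cutoff = ref_time - timedelta(seconds=grace_period)

# Equivalent of: UnifiedJob.objects.filter(status='waiting', modified__lte=cutoff, ...)
print(f'reap waiting jobs not modified since {cutoff.isoformat()}')
```
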
File: awx/main/dispatch/worker/base.py (path inferred from content)

@@ -17,6 +17,7 @@ from django.conf import settings

 from awx.main.dispatch.pool import WorkerPool
 from awx.main.dispatch import pg_bus_conn
+from awx.main.utils.common import log_excess_runtime

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -39,7 +40,6 @@ class WorkerSignalHandler:


 class AWXConsumerBase(object):
-
     last_stats = time.time()

     def __init__(self, name, worker, queues=[], pool=None):
@@ -62,7 +62,7 @@ class AWXConsumerBase(object):
     def control(self, body):
         logger.warning(f'Received control signal:\n{body}')
         control = body.get('control')
-        if control in ('status', 'running'):
+        if control in ('status', 'running', 'cancel'):
             reply_queue = body['reply_to']
             if control == 'status':
                 msg = '\n'.join([self.listening_on, self.pool.debug()])
@@ -71,6 +71,17 @@ class AWXConsumerBase(object):
                 for worker in self.pool.workers:
                     worker.calculate_managed_tasks()
                     msg.extend(worker.managed_tasks.keys())
+            elif control == 'cancel':
+                msg = []
+                task_ids = set(body['task_ids'])
+                for worker in self.pool.workers:
+                    task = worker.current_task
+                    if task and task['uuid'] in task_ids:
+                        logger.warn(f'Sending SIGTERM to task id={task["uuid"]}, task={task.get("task")}, args={task.get("args")}')
+                        os.kill(worker.pid, signal.SIGTERM)
+                        msg.append(task['uuid'])
+                if task_ids and not msg:
+                    logger.info(f'Could not locate running tasks to cancel with ids={task_ids}')

             with pg_bus_conn() as conn:
                 conn.notify(reply_queue, json.dumps(msg))
@@ -81,6 +92,9 @@ class AWXConsumerBase(object):
             logger.error('unrecognized control message: {}'.format(control))

     def process_task(self, body):
+        if isinstance(body, dict):
+            body['time_ack'] = time.time()
+
         if 'control' in body:
             try:
                 return self.control(body)
@@ -99,8 +113,8 @@ class AWXConsumerBase(object):
             queue = 0
         self.pool.write(queue, body)
         self.total_messages += 1
-        self.record_statistics()

+    @log_excess_runtime(logger)
     def record_statistics(self):
         if time.time() - self.last_stats > 1:  # buffer stat recording to once per second
             try:
@@ -140,6 +154,16 @@ class AWXConsumerPG(AWXConsumerBase):
         # if no successful loops have ran since startup, then we should fail right away
         self.pg_is_down = True  # set so that we fail if we get database errors on startup
         self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
+        self.last_cleanup = time.time()
+
+    def run_periodic_tasks(self):
+        self.record_statistics()  # maintains time buffer in method
+
+        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
+            # NOTE: if we run out of database connections, it is important to still run cleanup
+            # so that we scale down workers and free up connections
+            self.pool.cleanup()
+            self.last_cleanup = time.time()

     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)
@@ -149,14 +173,16 @@ class AWXConsumerPG(AWXConsumerBase):

         while True:
             try:
-                with pg_bus_conn() as conn:
+                with pg_bus_conn(new_connection=True) as conn:
                     for queue in self.queues:
                         conn.listen(queue)
                     if init is False:
                         self.worker.on_start()
                         init = True
-                    for e in conn.events():
-                        self.process_task(json.loads(e.payload))
+                    for e in conn.events(yield_timeouts=True):
+                        if e is not None:
+                            self.process_task(json.loads(e.payload))
+                        self.run_periodic_tasks()
                     self.pg_is_down = False
                     if self.should_stop:
                         return
@@ -213,6 +239,8 @@ class BaseWorker(object):
                     # so we can establish a new connection
                     conn.close_if_unusable_or_obsolete()
                 self.perform_work(body, *args)
+            except Exception:
+                logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
             finally:
                 if 'uuid' in body:
                     uuid = body['uuid']
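
Having `events()` yield `None` on select timeouts is what lets `run_periodic_tasks` fire even on an idle bus. A minimal sketch of that loop shape, with a toy generator standing in for the pg_notify stream (it sleeps to imitate a select timeout):

```python
import time


def events(select_timeout=5, yield_timeouts=False):
    # Stand-in for PubSub.events(): pretend every wait times out with no message.
    while True:
        time.sleep(select_timeout)
        if yield_timeouts:
            yield None  # a timeout "tick" instead of a message


last_cleanup = time.time()
for e in events(select_timeout=1, yield_timeouts=True):
    if e is not None:
        pass  # would be process_task(json.loads(e.payload)) in the real consumer
    if time.time() - last_cleanup > 60:
        print('running periodic cleanup')  # pool.cleanup() in the real consumer
        last_cleanup = time.time()
    break  # keep the sketch from looping forever
```
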
File: awx/main/dispatch/worker/callback.py (path inferred from content)

@@ -3,14 +3,12 @@ import logging
 import os
 import signal
 import time
-import traceback
 import datetime

 from django.conf import settings
 from django.utils.functional import cached_property
 from django.utils.timezone import now as tz_now
-from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
-from django.db.utils import InterfaceError, InternalError
+from django.db import transaction, connection as django_connection
 from django_guid import set_guid

 import psutil
@@ -64,6 +62,7 @@ class CallbackBrokerWorker(BaseWorker):
     """

     MAX_RETRIES = 2
+    INDIVIDUAL_EVENT_RETRIES = 3
     last_stats = time.time()
     last_flush = time.time()
     total = 0
@@ -155,6 +154,8 @@ class CallbackBrokerWorker(BaseWorker):
         metrics_events_missing_created = 0
         metrics_total_job_event_processing_seconds = datetime.timedelta(seconds=0)
         for cls, events in self.buff.items():
+            if not events:
+                continue
             logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
             for e in events:
                 e.modified = now  # this can be set before created because now is set above on line 149
@@ -164,28 +165,48 @@ class CallbackBrokerWorker(BaseWorker):
                 else:  # only calculate the seconds if the created time already has been set
                     metrics_total_job_event_processing_seconds += e.modified - e.created
             metrics_duration_to_save = time.perf_counter()
+            saved_events = []
             try:
                 cls.objects.bulk_create(events)
                 metrics_bulk_events_saved += len(events)
-            except Exception:
+                saved_events = events
+                self.buff[cls] = []
+            except Exception as exc:
+                # If the database is flaking, let ensure_connection throw a general exception
+                # will be caught by the outer loop, which goes into a proper sleep and retry loop
+                django_connection.ensure_connection()
+                logger.warning(f'Error in events bulk_create, will try indiviually, error: {str(exc)}')
                 # if an exception occurs, we should re-attempt to save the
                 # events one-by-one, because something in the list is
                 # broken/stale
                 metrics_events_batch_save_errors += 1
-                for e in events:
+                for e in events.copy():
                     try:
                         e.save()
                         metrics_singular_events_saved += 1
-                    except Exception:
-                        logger.exception('Database Error Saving Job Event')
+                        events.remove(e)
+                        saved_events.append(e)  # Importantly, remove successfully saved events from the buffer
+                    except Exception as exc_indv:
+                        retry_count = getattr(e, '_retry_count', 0) + 1
+                        e._retry_count = retry_count
+
+                        # special sanitization logic for postgres treatment of NUL 0x00 char
+                        if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
+                            e.stdout = e.stdout.replace("\x00", "")
+
+                        if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
+                            logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
+                            events.remove(e)
+                        else:
+                            logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}')
+
             metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
-            for e in events:
+            for e in saved_events:
                 if not getattr(e, '_skip_websocket_message', False):
                     metrics_events_broadcast += 1
                     emit_event_detail(e)
                 if getattr(e, '_notification_trigger_event', False):
                     job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
-        self.buff = {}
         self.last_flush = time.time()
         # only update metrics if we saved events
         if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
@@ -257,19 +278,16 @@ class CallbackBrokerWorker(BaseWorker):
             try:
                 self.flush(force=flush)
                 break
-            except (OperationalError, InterfaceError, InternalError):
+            except Exception as exc:
+                # Aside form bugs, exceptions here are assumed to be due to database flake
                 if retries >= self.MAX_RETRIES:
                     logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
+                    self.buff = {}
                     return
                 delay = 60 * retries
-                logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(i=retries + 1, delay=delay))
+                logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}')
                 django_connection.close()
                 time.sleep(delay)
                 retries += 1
-            except DatabaseError:
-                logger.exception('Database Error Saving Job Event')
-                break
-        except Exception as exc:
-            tb = traceback.format_exc()
-            logger.error('Callback Task Processor Raised Exception: %r', exc)
-            logger.error('Detail: {}'.format(tb))
+        except Exception:
+            logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}')
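
The individual-save path above tracks a `_retry_count` per event and, on the first failure, strips the NUL (0x00) characters that PostgreSQL text columns reject. A compressed, self-contained sketch of that bookkeeping (a tight loop here, whereas the real worker retries across flushes; the fake `save` imitates the ValueError psycopg2 surfaces):

```python
INDIVIDUAL_EVENT_RETRIES = 3


class FakeEvent:
    def __init__(self, stdout):
        self.stdout = stdout

    def save(self):
        if "\x00" in self.stdout:
            raise ValueError("A string literal cannot contain NUL (0x00) characters.")


def save_with_retries(e):
    while True:
        try:
            e.save()
            return True
        except ValueError as exc:
            retry_count = getattr(e, '_retry_count', 0) + 1
            e._retry_count = retry_count
            # first retry: sanitize the NUL bytes postgres refuses to store
            if retry_count == 1 and "\x00" in e.stdout:
                e.stdout = e.stdout.replace("\x00", "")
                continue
            if retry_count >= INDIVIDUAL_EVENT_RETRIES:
                return False  # give up on this event


print(save_with_retries(FakeEvent("ok\x00line")))  # True after sanitizing
```
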
File: awx/main/dispatch/worker/task.py (path inferred from content)

@@ -3,6 +3,7 @@ import logging
 import importlib
 import sys
 import traceback
+import time

 from kubernetes.config import kube_config

@@ -60,8 +61,19 @@ class TaskWorker(BaseWorker):
             # the callable is a class, e.g., RunJob; instantiate and
             # return its `run()` method
             _call = _call().run

+        log_extra = ''
+        logger_method = logger.debug
+        if ('time_ack' in body) and ('time_pub' in body):
+            time_publish = body['time_ack'] - body['time_pub']
+            time_waiting = time.time() - body['time_ack']
+            if time_waiting > 5.0 or time_publish > 5.0:
+                # If task too a very long time to process, add this information to the log
+                log_extra = f' took {time_publish:.4f} to ack, {time_waiting:.4f} in local dispatcher'
+                logger_method = logger.info
         # don't print kwargs, they often contain launch-time secrets
-        logger.debug('task {} starting {}(*{})'.format(uuid, task, args))
+        logger_method(f'task {uuid} starting {task}(*{args}){log_extra}')

         return _call(*args, **kwargs)

     def perform_work(self, body):
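
Since the publisher stamps `time_pub` and the consumer stamps `time_ack`, the worker can split end-to-end latency into a publish-to-ack leg and a local-queue leg, and raise the log level only when either exceeds five seconds. A sketch of the computation with fabricated timestamps:

```python
import time

body = {'time_pub': time.time() - 12.5, 'time_ack': time.time() - 6.0}  # fabricated lag

time_publish = body['time_ack'] - body['time_pub']   # ~6.5s from notify to consumer ack
time_waiting = time.time() - body['time_ack']        # ~6.0s sitting in the local pool

if time_waiting > 5.0 or time_publish > 5.0:
    print(f'slow dispatch: took {time_publish:.4f} to ack, {time_waiting:.4f} in local dispatcher')
```
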
File: awx/main/fields.py (path inferred from content)

@@ -232,7 +232,6 @@ class ImplicitRoleField(models.ForeignKey):
             field_names = [field_names]

         for field_name in field_names:
-
             if field_name.startswith('singleton:'):
                 continue

@@ -244,7 +243,6 @@ class ImplicitRoleField(models.ForeignKey):
             field = getattr(cls, field_name, None)

             if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:
-
                 if '.' in field_attr:
                     raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')

@@ -629,7 +627,6 @@ class CredentialInputField(JSONSchemaField):
         # `ssh_key_unlock` requirements are very specific and can't be
         # represented without complicated JSON schema
         if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:
-
             # in order to properly test the necessity of `ssh_key_unlock`, we
             # need to know the real value of `ssh_key_data`; for a payload like:
             # {
@@ -791,7 +788,8 @@ class CredentialTypeInjectorField(JSONSchemaField):
                 'type': 'object',
                 'patternProperties': {
                     # http://docs.ansible.com/ansible/playbooks_variables.html#what-makes-a-valid-variable-name
-                    '^[a-zA-Z_]+[a-zA-Z0-9_]*$': {'type': 'string'},
+                    # plus, add ability to template
+                    r'^[a-zA-Z_\{\}]+[a-zA-Z0-9_\{\}]*$': {"anyOf": [{'type': 'string'}, {'type': 'array'}, {'$ref': '#/properties/extra_vars'}]}
                 },
                 'additionalProperties': False,
             },
@@ -858,27 +856,44 @@ class CredentialTypeInjectorField(JSONSchemaField):
             template_name = template_name.split('.')[1]
             setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE_FILENAME')

+        def validate_template_string(type_, key, tmpl):
+            try:
+                sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
+            except UndefinedError as e:
+                raise django_exceptions.ValidationError(
+                    _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
+                    code='invalid',
+                    params={'value': value},
+                )
+            except SecurityError as e:
+                raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
+            except TemplateSyntaxError as e:
+                raise django_exceptions.ValidationError(
+                    _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
+                    code='invalid',
+                    params={'value': value},
+                )
+
+        def validate_extra_vars(key, node):
+            if isinstance(node, dict):
+                for k, v in node.items():
+                    validate_template_string("extra_vars", 'a key' if key is None else key, k)
+                    validate_extra_vars(k if key is None else "{key}.{k}".format(key=key, k=k), v)
+            elif isinstance(node, list):
+                for i, x in enumerate(node):
+                    validate_extra_vars("{key}[{i}]".format(key=key, i=i), x)
+            else:
+                validate_template_string("extra_vars", key, node)
+
         for type_, injector in value.items():
             if type_ == 'env':
                 for key in injector.keys():
                     self.validate_env_var_allowed(key)
-            for key, tmpl in injector.items():
-                try:
-                    sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
-                except UndefinedError as e:
-                    raise django_exceptions.ValidationError(
-                        _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
-                        code='invalid',
-                        params={'value': value},
-                    )
-                except SecurityError as e:
-                    raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
-                except TemplateSyntaxError as e:
-                    raise django_exceptions.ValidationError(
-                        _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
-                        code='invalid',
-                        params={'value': value},
-                    )
+            if type_ == 'extra_vars':
+                validate_extra_vars(None, injector)
+            else:
+                for key, tmpl in injector.items():
+                    validate_template_string(type_, key, tmpl)


 class AskForField(models.BooleanField):
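
`validate_extra_vars` recurses through nested dicts and lists so that every key and every leaf string is template-checked, carrying a dotted or indexed path along for error messages. A trimmed standalone sketch (errors reduced to prints; the real field raises `ValidationError`, and the namespace here is a stand-in):

```python
from jinja2 import sandbox, StrictUndefined
from jinja2.exceptions import SecurityError, TemplateSyntaxError, UndefinedError

valid_namespace = {'awx_user': 'EXAMPLE'}  # stand-in for the field's real namespace


def validate_template_string(key, tmpl):
    try:
        sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
    except (UndefinedError, SecurityError, TemplateSyntaxError) as e:
        print(f'{key}: {e.__class__.__name__}: {e}')


def validate_extra_vars(key, node):
    if isinstance(node, dict):
        for k, v in node.items():
            validate_template_string('a key' if key is None else key, k)
            validate_extra_vars(k if key is None else f'{key}.{k}', v)
    elif isinstance(node, list):
        for i, x in enumerate(node):
            validate_extra_vars(f'{key}[{i}]', x)
    else:
        validate_template_string(key, node)


validate_extra_vars(None, {'region': '{{ awx_user }}', 'tags': ['{{ missing_var }}']})
```
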
File: awx/main/management/commands/bottleneck.py (path inferred from content; the modified line in this hunk was not captured by this extraction, so only context is shown)

@@ -25,7 +25,7 @@ class Command(BaseCommand):
         with connection.cursor() as cursor:
             cursor.execute(
                 f'''
                 SELECT
                 b.id, b.job_id, b.host_name, b.created - a.created delta,
                 b.task task,
                 b.event_data::json->'task_action' task_action,
File: (management command that checks database connectivity; path not shown in this capture)

@@ -9,7 +9,6 @@
     """Checks connection to the database, and prints out connection info if not connected"""

     def handle(self, *args, **options):
-
         with connection.cursor() as cursor:
             cursor.execute("SELECT version()")
             version = str(cursor.fetchone()[0])
File: awx/main/management/commands/cleanup_jobs.py (path inferred from content)

@@ -82,7 +82,6 @@ class DeleteMeta:
         part_drop = {}

         for pk, status, created in self.jobs_qs:
-
             part_key = partition_table_name(self.job_class, created)
             if status in ['pending', 'waiting', 'running']:
                 part_drop[part_key] = False
File: awx/main/management/commands/create_oauth2_token.py (path inferred from content)

@@ -17,7 +17,6 @@ class Command(BaseCommand):

     def handle(self, *args, **options):
         if not options['user']:
-
             raise CommandError('Username not supplied. Usage: awx-manage create_oauth2_token --user=username.')
         try:
             user = User.objects.get(username=options['user'])
File: awx/main/management/commands/enable_local_authentication.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+from django.core.management.base import BaseCommand, CommandError
+from django.conf import settings
+
+
+class Command(BaseCommand):
+    """enable or disable authentication system"""
+
+    def add_arguments(self, parser):
+        """
+        This adds the --enable --disable functionalities to the command using mutally_exclusive to avoid situations in which users pass both flags
+        """
+        group = parser.add_mutually_exclusive_group()
+        group.add_argument('--enable', dest='enable', action='store_true', help='Pass --enable to enable local authentication')
+        group.add_argument('--disable', dest='disable', action='store_true', help='Pass --disable to disable local authentication')
+
+    def _enable_disable_auth(self, enable, disable):
+        """
+        this method allows the disabling or enabling of local authenication based on the argument passed into the parser
+        if no arguments throw a command error, if --enable set the DISABLE_LOCAL_AUTH to False
+        if --disable it's set to True. Realizing that the flag is counterintuitive to what is expected.
+        """
+
+        if enable:
+            settings.DISABLE_LOCAL_AUTH = False
+            print("Setting has changed to {} allowing local authentication".format(settings.DISABLE_LOCAL_AUTH))
+
+        elif disable:
+            settings.DISABLE_LOCAL_AUTH = True
+            print("Setting has changed to {} disallowing local authentication".format(settings.DISABLE_LOCAL_AUTH))
+
+        else:
+            raise CommandError('Please pass --enable flag to allow local auth or --disable flag to disable local auth')
+
+    def handle(self, **options):
+        self._enable_disable_auth(options.get('enable'), options.get('disable'))
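
The command is invoked as `awx-manage enable_local_authentication --enable` (or `--disable`). Its flag handling is plain argparse; a minimal sketch of the mutually exclusive group it relies on:

```python
import argparse

# Passing both --enable and --disable is rejected by the parser itself.
parser = argparse.ArgumentParser(prog='awx-manage enable_local_authentication')
group = parser.add_mutually_exclusive_group()
group.add_argument('--enable', action='store_true')
group.add_argument('--disable', action='store_true')

print(parser.parse_args(['--enable']))          # Namespace(enable=True, disable=False)
# parser.parse_args(['--enable', '--disable'])  # exits with a "not allowed with" error
```
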
File: awx/main/management/commands/export_custom_scripts.py (path inferred from content)

@@ -10,7 +10,6 @@ from django.utils.text import slugify


 class Command(BaseCommand):
-
     help = 'Export custom inventory scripts into a tarfile.'

     def add_arguments(self, parser):
@@ -21,7 +20,6 @@ class Command(BaseCommand):

         with tempfile.TemporaryDirectory() as tmpdirname:
             with tarfile.open(tar_filename, "w") as tar:
-
                 for cis in CustomInventoryScript.objects.all():
                     # naming convention similar to project paths
                     slug_name = slugify(str(cis.name)).replace(u'-', u'_')
Some files were not shown because too many files have changed in this diff.