Mirror of https://github.com/ansible/awx.git (synced 2026-02-09 21:54:43 -03:30)
Compare commits
834 commits (05af2972bf … 29702400f1)
```diff
@@ -1,3 +1,2 @@
-awx/ui/node_modules
 Dockerfile
 .git
```
.github/ISSUE_TEMPLATE.md (2 lines changed)

```diff
@@ -25,7 +25,7 @@ Instead use the bug or feature request.
 <!--- Pick one below and delete the rest: -->
 - Breaking Change
 - New or Enhanced Feature
-- Bug or Docs Fix
+- Bug, Docs Fix or other nominal change
 
 
 ##### COMPONENT NAME
```
.github/ISSUE_TEMPLATE/feature_request.yml (46 lines changed)

```diff
@@ -20,6 +20,19 @@ body:
         - label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
           required: true
 
+  - type: dropdown
+    id: feature-type
+    attributes:
+      label: Feature type
+      description: >-
+        What kind of feature is this?
+      multiple: false
+      options:
+        - "New Feature"
+        - "Enhancement to Existing Feature"
+    validations:
+      required: true
+
   - type: textarea
     id: summary
     attributes:
@@ -40,3 +53,36 @@ body:
         - label: CLI
         - label: Other
 
+  - type: textarea
+    id: steps-to-reproduce
+    attributes:
+      label: Steps to reproduce
+      description: >-
+        Describe the necessary steps to understand the scenario of the requested enhancement.
+        Include all the steps that will help the developer and QE team understand what you are requesting.
+    validations:
+      required: true
+
+  - type: textarea
+    id: current-results
+    attributes:
+      label: Current results
+      description: What is currently happening on the scenario?
+    validations:
+      required: true
+
+  - type: textarea
+    id: sugested-results
+    attributes:
+      label: Sugested feature result
+      description: What is the result this new feature will bring?
+    validations:
+      required: true
+
+  - type: textarea
+    id: additional-information
+    attributes:
+      label: Additional information
+      description: Please provide any other information you think is relevant that could help us understand your feature request.
+    validations:
+      required: false
```
.github/PULL_REQUEST_TEMPLATE.md (2 lines changed)

```diff
@@ -11,7 +11,7 @@ the change does.
 <!--- Pick one below and delete the rest: -->
 - Breaking Change
 - New or Enhanced Feature
-- Bug or Docs Fix
+- Bug, Docs Fix or other nominal change
 
 ##### COMPONENT NAME
 <!--- Name of the module/plugin/module/task -->
```
.github/triage_replies.md (42 lines changed)

```diff
@@ -1,5 +1,5 @@
 ## General
 - For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
 - You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
 
@@ -53,15 +53,25 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!
 
 
+### Red Hat Support Team
+- Hi! \
+\
+It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
+\
+Here is the link to do so: \
+\
+https://access.redhat.com/support \
+\
+Thank you for your submission and for supporting AWX!
 
 
 ## Common
 
 ### Give us more info
 - Hello, we'd love to help, but we need a little more information about the problem you're having. Screenshots, log outputs, or any reproducers would be very helpful.
 
 ### Code of Conduct
 - Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html
 
 ### EE Contents / Community General
 - Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://ansible-builder.readthedocs.io/en/stable/ \
@@ -79,31 +89,41 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 - Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
 
 ### Receptor
 - You can find the receptor docs here: https://receptor.readthedocs.io/en/latest/
 - Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
 
 ### Ansible Engine not AWX
 - Hello, your question seems to be about Ansible development, not about AWX. Try asking on the Ansible-devel specific mailing list: https://groups.google.com/g/ansible-devel
 - Hello, your question seems to be about using Ansible, not about AWX. https://groups.google.com/g/ansible-project is the best place to visit for user questions about Ansible. Thanks!
 
 ### Ansible Galaxy not AWX
 - Hey there. That sounds like an FAQ question. Did this: https://www.ansible.com/products/awx-project/faq cover your question?
 
 ### Contributing Guidelines
 - AWX: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
 - AWX-Operator: https://github.com/ansible/awx-operator/blob/devel/CONTRIBUTING.md
 
+### Oracle AWX
+We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support.
+
+### Community Resolved
+Hi,
+
+We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise.
+
+<name of community member who helped> thanks so much for taking the time to write a thoughtful and helpful response to this issue!
+
 ### AWX Release
-Subject: Announcing AWX X.Y.z
+Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb
 
 - Hi all, \
 \
-We're happy to announce that the next release of AWX, version <X> is now available! \
-In addition AWX Operator version <Y> has also been release! \
+We're happy to announce that the next release of AWX, version <b>`Xa.Ya.za`</b> is now available! \
+In addition AWX Operator version <b>`Xb.Yb.zb`</b> has also been released! \
 \
 Please see the releases pages for more details: \
-AWX: https://github.com/ansible/awx/releases/tag/<X> \
-Operator: https://github.com/ansible/awx-operator/releases/tag/<Y> \
+AWX: https://github.com/ansible/awx/releases/tag/Xa.Ya.za \
+Operator: https://github.com/ansible/awx-operator/releases/tag/Xb.Yb.zb \
 \
 The AWX team.
 
```
.github/workflows/ci.yml (93 lines changed)

```diff
@@ -1,7 +1,10 @@
 ---
 name: CI
 env:
-  BRANCH: ${{ github.base_ref || 'devel' }}
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
+  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
 jobs:
@@ -17,85 +20,33 @@ jobs:
         tests:
           - name: api-test
             command: /start_tests.sh
-            label: Run API Tests
           - name: api-lint
             command: /var/lib/awx/venv/awx/bin/tox -e linters
-            label: Run API Linters
           - name: api-swagger
             command: /start_tests.sh swagger
-            label: Generate API Reference
           - name: awx-collection
             command: /start_tests.sh test_collection_all
-            label: Run Collection Tests
           - name: api-schema
-            label: Check API Schema
             command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
           - name: ui-lint
-            label: Run UI Linters
             command: make ui-lint
           - name: ui-test-screens
-            label: Run UI Screens Tests
             command: make ui-test-screens
           - name: ui-test-general
-            label: Run UI General Tests
             command: make ui-test-general
     steps:
       - uses: actions/checkout@v2
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.py_version }}
-
-      - name: Log in to registry
-        run: |
-          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
-
-      - name: Build image
-        run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
-
-      - name: ${{ matrix.texts.label }}
-        run: |
-          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
-            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
+      - name: Run check ${{ matrix.tests.name }}
+        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make github_ci_runner
 
   dev-env:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.py_version }}
-
-      - name: Log in to registry
-        run: |
-          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
-
-      - name: Build image
-        run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
-
       - name: Run smoke test
-        run: |
-          export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }}
-          export COMPOSE_TAG=${{ env.BRANCH }}
-          ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v
+        run: make github_ci_setup && ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
 
   awx-operator:
     runs-on: ubuntu-latest
@@ -111,9 +62,18 @@ jobs:
           repository: ansible/awx-operator
           path: awx-operator
 
+      - name: Get python version from Makefile
+        working-directory: awx
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
       - name: Install playbook dependencies
         run: |
-          python3 -m pip install docker setuptools_scm
+          python3 -m pip install docker
 
       - name: Build AWX image
         working-directory: awx
@@ -135,3 +95,22 @@ jobs:
         env:
           AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
+
+  collection-sanity:
+    name: awx_collection sanity
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v2
+
+      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
+      - name: Upgrade ansible-core
+        run: python3 -m pip install --upgrade ansible-core
+
+      - name: Run sanity tests
+        run: make test_collection_sanity
+        env:
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1
```
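Worth noting on the ci.yml rewrite: the per-step Python setup, registry login, image pre-pull, build, and `docker run` choreography is collapsed into a single `make github_ci_runner` call, and each matrix entry now selects its check through `AWX_DOCKER_CMD`. As a hedged illustration, a local invocation mirroring the `api-lint` matrix entry might look like the following, assuming a checkout whose Makefile provides the `github_ci_runner` target referenced above:

```bash
# Run one CI check the way the rewritten workflow does: the make target
# handles obtaining the awx_devel image and running the given command
# inside it; AWX_DOCKER_CMD selects which check executes.
AWX_DOCKER_CMD='/var/lib/awx/venv/awx/bin/tox -e linters' make github_ci_runner
```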
.github/workflows/devel_images.yml (2 lines changed)

```diff
@@ -1,5 +1,7 @@
 ---
 name: Build/Push Development Images
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   push:
     branches:
```
.github/workflows/e2e_test.yml (7 lines changed)

```diff
@@ -1,9 +1,12 @@
 ---
 name: E2E Tests
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   pull_request_target:
     types: [labeled]
 jobs:
   e2e-test:
     if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
     runs-on: ubuntu-latest
@@ -104,5 +107,3 @@ jobs:
         with:
           name: AWX-logs-${{ matrix.job }}
           path: make-docker-compose-output.log
-
-
```
.github/workflows/feature_branch_deletion.yml (new file, 26 lines)

```diff
@@ -0,0 +1,26 @@
+---
+name: Feature branch deletion cleanup
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+on:
+  delete:
+    branches:
+      - feature_**
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Delete API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
```
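The cleanup step above deletes the per-branch `schema.json` that the Upload API Schema workflow publishes, using an ad-hoc Ansible `aws_s3` call. As a hedged illustration only, the same deletion could be done with the AWS CLI; the branch name below is a hypothetical example:

```bash
# Remove the schema file published for a deleted feature branch.
# Bucket, key layout, and region mirror the workflow step; the
# "feature_example" branch name is made up for illustration.
aws s3api delete-object \
  --region us-east-1 \
  --bucket awx-public-ci-files \
  --key "feature_example/schema.json"
```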
.github/workflows/label_issue.yml (31 lines changed)

```diff
@@ -19,3 +19,34 @@ jobs:
           not-before: 2021-12-07T07:00:00Z
           configuration-path: .github/issue_labeler.yml
           enable-versioned-regex: 0
+
+  community:
+    runs-on: ubuntu-latest
+    name: Label Issue - Community
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
+      - name: Install python requests
+        run: pip install requests
+      - name: Check if user is a member of Ansible org
+        uses: jannekem/run-python-script-action@v1
+        id: check_user
+        with:
+          script: |
+            import requests
+            headers = {'Accept': 'application/vnd.github+json', 'Authorization': 'token ${{ secrets.GITHUB_TOKEN }}'}
+            response = requests.get('${{ fromJson(toJson(github.event.issue.user.url)) }}/orgs?per_page=100', headers=headers)
+            is_member = False
+            for org in response.json():
+                if org['login'] == 'ansible':
+                    is_member = True
+            if is_member:
+                print("User is member")
+            else:
+                print("User is community")
+      - name: Add community label if not a member
+        if: contains(steps.check_user.outputs.stdout, 'community')
+        uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90
+        with:
+          add-labels: "community"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/label_pr.yml (31 lines changed)

```diff
@@ -18,3 +18,34 @@ jobs:
         with:
           repo-token: "${{ secrets.GITHUB_TOKEN }}"
           configuration-path: .github/pr_labeler.yml
+
+  community:
+    runs-on: ubuntu-latest
+    name: Label PR - Community
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
+      - name: Install python requests
+        run: pip install requests
+      - name: Check if user is a member of Ansible org
+        uses: jannekem/run-python-script-action@v1
+        id: check_user
+        with:
+          script: |
+            import requests
+            headers = {'Accept': 'application/vnd.github+json', 'Authorization': 'token ${{ secrets.GITHUB_TOKEN }}'}
+            response = requests.get('${{ fromJson(toJson(github.event.pull_request.user.url)) }}/orgs?per_page=100', headers=headers)
+            is_member = False
+            for org in response.json():
+                if org['login'] == 'ansible':
+                    is_member = True
+            if is_member:
+                print("User is member")
+            else:
+                print("User is community")
+      - name: Add community label if not a member
+        if: contains(steps.check_user.outputs.stdout, 'community')
+        uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90
+        with:
+          add-labels: "community"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
```
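Both labeler jobs embed the same inline script: list the event author's public GitHub organizations and print a marker string that a later step matches to decide whether to add the `community` label. A standalone sketch of that check in shell (the `user` value and `GITHUB_TOKEN` are hypothetical placeholders; the workflows take the user URL from the event payload, and only the first 100 public org memberships are examined):

```bash
# Print "User is member" if the ansible org appears among the user's
# public organizations, otherwise "User is community".
user=some-github-login   # hypothetical
resp=$(curl -s \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: token ${GITHUB_TOKEN}" \
  "https://api.github.com/users/${user}/orgs?per_page=100")
if echo "$resp" | grep -q '"login": *"ansible"'; then
  echo "User is member"
else
  echo "User is community"
fi
```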
.github/workflows/pr_body_check.yml (new file, 37 lines)

```diff
@@ -0,0 +1,37 @@
+---
+name: PR Check
+env:
+  BRANCH: ${{ github.base_ref || 'devel' }}
+on:
+  pull_request:
+    types: [opened, edited, reopened, synchronize]
+jobs:
+  pr-check:
+    name: Scan PR description for semantic versioning keywords
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Check for each of the lines
+        env:
+          PR_BODY: ${{ github.event.pull_request.body }}
+        run: |
+          echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z
+          echo "$PR_BODY" | grep "New or Enhanced Feature" > Y
+          echo "$PR_BODY" | grep "Breaking Change" > X
+          exit 0
+        # We exit 0 and set the shell to prevent the returns from the greps from failing this step
+        # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
+        shell: bash {0}
+
+      - name: Check for exactly one item
+        run: |
+          if [ $(cat X Y Z | wc -l) != 1 ] ; then
+            echo "The PR body must contain exactly one of [ 'Bug, Docs Fix or other nominal change', 'New or Enhanced Feature', 'Breaking Change' ]"
+            echo "We counted $(cat X Y Z | wc -l)"
+            echo "See the default PR body for examples"
+            exit 255;
+          else
+            exit 0;
+          fi
```
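The two steps above implement an "exactly one keyword" gate: the first greps the PR body for each template phrase into a scratch file, running under `shell: bash {0}` so a non-matching grep's exit status cannot fail the step, and the second requires the combined match count to be exactly one. A condensed standalone sketch of the same check (`pr_body.txt` is a hypothetical stand-in for `${{ github.event.pull_request.body }}`):

```bash
#!/usr/bin/env bash
# Require exactly one change-type phrase in the PR body, mirroring the
# workflow's X/Y/Z scratch files with a single counter.
count=0
for phrase in "Bug, Docs Fix or other nominal change" \
              "New or Enhanced Feature" \
              "Breaking Change"; do
  count=$((count + $(grep -c "$phrase" pr_body.txt || true)))
done
if [ "$count" != 1 ]; then
  echo "PR body must contain exactly one change-type line (found $count)" >&2
  exit 1
fi
```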
.github/workflows/promote.yml (19 lines changed)

```diff
@@ -1,5 +1,9 @@
 ---
 name: Promote Release
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   release:
     types: [published]
@@ -34,9 +38,13 @@ jobs:
       - name: Build collection and publish to galaxy
         run: |
           COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
-          ansible-galaxy collection publish \
-            --token=${{ secrets.GALAXY_TOKEN }} \
-            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
+          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
+            ansible-galaxy collection publish \
+              --token=${{ secrets.GALAXY_TOKEN }} \
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+          fi
 
       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -48,6 +56,7 @@ jobs:
 
       - name: Build awxkit and upload to pypi
         run: |
+          git reset --hard
           cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
@@ -70,4 +79,6 @@ jobs:
           docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
           docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
           docker push quay.io/${{ github.repository }}:latest
+          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
```
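The rewritten publish step makes the Galaxy upload idempotent: a `curl --head` against the collection's download URL reports 302 (a redirect to the artifact) when that version already exists, in which case the publish is skipped instead of failing on a duplicate upload. The same guard as a standalone sketch, with hypothetical namespace and tag values in place of the workflow's expressions:

```bash
#!/usr/bin/env bash
# Skip re-publishing when Galaxy already serves this collection tarball.
NAMESPACE=awx      # hypothetical; the workflow uses env.collection_namespace
TAG=21.0.0         # hypothetical; the workflow uses the release tag name
URL="https://galaxy.ansible.com/download/${NAMESPACE}-awx-${TAG}.tar.gz"

if [ "$(curl --head -sw '%{http_code}' "$URL" | tail -1)" == "302" ]; then
  echo "Galaxy release already done"
else
  ansible-galaxy collection publish \
    --token="${GALAXY_TOKEN}" \
    "awx_collection_build/${NAMESPACE}-awx-${TAG}.tar.gz"
fi
```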
.github/workflows/stage.yml (21 lines changed)

```diff
@@ -1,5 +1,9 @@
 ---
 name: Stage Release
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   workflow_dispatch:
     inputs:
@@ -65,7 +69,7 @@ jobs:
 
       - name: Install playbook dependencies
         run: |
-          python3 -m pip install docker setuptools_scm
+          python3 -m pip install docker
 
       - name: Build and stage AWX
         working-directory: awx
@@ -80,6 +84,20 @@ jobs:
             -e push=yes \
             -e awx_official=yes
 
+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+
+      - name: tag awx-ee:latest with version input
+        run: |
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+
       - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
@@ -99,6 +117,7 @@ jobs:
         env:
           AWX_TEST_IMAGE: ${{ github.repository }}
           AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
 
       - name: Create draft release for AWX
         working-directory: awx
```
.github/workflows/update_dependabot_prs.yml (new file, 29 lines)

```diff
@@ -0,0 +1,29 @@
+---
+name: Dependency Pr Update
+on:
+  pull_request:
+    types: [labeled, opened, reopened]
+
+jobs:
+  pr-check:
+    name: Update Dependabot Prs
+    if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout branch
+        uses: actions/checkout@v3
+
+      - name: Update PR Body
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+          OWNER: ${{ github.repository_owner }}
+          REPO: ${{ github.event.repository.name }}
+          PR: ${{github.event.pull_request.number}}
+          PR_BODY: ${{github.event.pull_request.body}}
+        run: |
+          gh pr checkout ${{ env.PR }}
+          echo "${{ env.PR_BODY }}" > my_pr_body.txt
+          echo "" >> my_pr_body.txt
+          echo "Bug, Docs Fix or other nominal change" >> my_pr_body.txt
+          gh pr edit ${{env.PR}} --body-file my_pr_body.txt
```
.github/workflows/upload_schema.yml (5 lines changed)

```diff
@@ -1,10 +1,15 @@
 ---
 name: Upload API Schema
+
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+
 on:
   push:
     branches:
       - devel
       - release_**
+      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest
```
.gitignore (3 lines changed)

```diff
@@ -153,9 +153,6 @@ use_dev_supervisor.txt
 /sanity/
 /awx_collection_build/
 
-# Setup for metrics gathering
-tools/prometheus/prometheus.yml
-
 .idea/*
 *.unison.tmp
 *.#
```
```diff
@@ -8,6 +8,8 @@ ignore: |
   awx/ui/test/e2e/tests/smoke-vars.yml
   awx/ui/node_modules
   tools/docker-compose/_sources
+  # django template files
+  awx/api/templates/instance_install_bundle/**
 
 extends: default
 
```
````diff
@@ -19,16 +19,17 @@ Have questions about this document or anything not covered here? Come chat with
 - [Purging containers and images](#purging-containers-and-images)
 - [Pre commit hooks](#pre-commit-hooks)
 - [What should I work on?](#what-should-i-work-on)
+- [Translations](#translations)
 - [Submitting Pull Requests](#submitting-pull-requests)
-- [PR Checks run by Zuul](#pr-checks-run-by-zuul)
 - [Reporting Issues](#reporting-issues)
+- [Getting Help](#getting-help)
 
 ## Things to know prior to submitting code
 
 - All code submissions are done through pull requests against the `devel` branch.
 - You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
 - Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
-- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
+- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt).
 - If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.libera.chat, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
 - We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
 
@@ -42,8 +43,7 @@ The AWX development environment workflow and toolchain uses Docker and the docke
 
 Prior to starting the development services, you'll need `docker` and `docker-compose`. On Linux, you can generally find these in your distro's packaging, but you may find that Docker themselves maintain a separate repo that tracks more closely to the latest releases.
 
-For macOS and Windows, we recommend [Docker for Mac](https://www.docker.com/docker-mac) and [Docker for Windows](https://www.docker.com/docker-windows)
-respectively.
+For macOS and Windows, we recommend [Docker for Mac](https://www.docker.com/docker-mac) and [Docker for Windows](https://www.docker.com/docker-windows) respectively.
 
 For Linux platforms, refer to the following from Docker:
 
@@ -79,17 +79,13 @@ See the [README.md](./tools/docker-compose/README.md) for docs on how to build t
 
 ### Building API Documentation
 
-AWX includes support for building [Swagger/OpenAPI
-documentation](https://swagger.io). To build the documentation locally, run:
+AWX includes support for building [Swagger/OpenAPI documentation](https://swagger.io). To build the documentation locally, run:
 
 ```bash
 (container)/awx_devel$ make swagger
 ```
 
-This will write a file named `swagger.json` that contains the API specification
-in OpenAPI format. A variety of online tools are available for translating
-this data into more consumable formats (such as HTML). http://editor.swagger.io
-is an example of one such service.
+This will write a file named `swagger.json` that contains the API specification in OpenAPI format. A variety of online tools are available for translating this data into more consumable formats (such as HTML). http://editor.swagger.io is an example of one such service.
 
 ### Accessing the AWX web interface
 
````
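As a quick, hypothetical follow-up to the rebuilt Swagger paragraph above (not part of the diff): the `swagger.json` written by `make swagger` can be sanity-checked with nothing but Python's standard library before feeding it to an online renderer such as http://editor.swagger.io:

```bash
# Confirm the generated spec parses as JSON and count its endpoints.
python3 - <<'EOF'
import json

with open("swagger.json") as f:
    spec = json.load(f)

print(spec.get("info", {}).get("title"), "-", len(spec.get("paths", {})), "paths")
EOF
```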
@@ -115,20 +111,30 @@ While you can use environment variables to skip the pre-commit hooks GitHub will

 ## What should I work on?

+We have a ["good first issue" label](https://github.com/ansible/awx/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) we put on some issues that might be a good starting point for new contributors.

+Fixing bugs and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start.

 For feature work, take a look at the current [Enhancements](https://github.com/ansible/awx/issues?q=is%3Aissue+is%3Aopen+label%3Atype%3Aenhancement).

 If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.

-Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](./docs/debugging/).
+**NOTES**

+> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.

-**NOTE**

 > If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

-**NOTE**

 > If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](./awx/ui/README.md).

+### Translations

+At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.

+If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.

 ## Submitting Pull Requests

 Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) against the `devel` branch.
@@ -152,28 +158,14 @@ We like to keep our commit history clean, and will require resubmission of pull

 Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefully. Please be patient.

-All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.
+When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.

-## PR Checks run by Zuul
-
-Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.
-
-Zuul runs the following checks that must pass:
-
-1. `tox-awx-api-lint`
-2. `tox-awx-ui-lint`
-3. `tox-awx-api`
-4. `tox-awx-ui`
-5. `tox-awx-swagger`
-
-Zuul runs the following checks that are non-voting (can not pass but serve to inform PR reviewers):
-
-1. `tox-awx-detect-schema-change`
-This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
-Reviewers should inspect the `job-output.txt.gz` related to the check if their is a failure (grep for `diff -u -b` to find beginning of diff).
-If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
-If not, the schema changes should be fixed, but this decision must be enforced by reviewers.
 ## Reporting Issues

 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).

+## Getting Help

+If you require additional assistance, please reach out to us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

+For extra information on debugging tools, see [Debugging](./docs/debugging/).

@@ -3,7 +3,7 @@ recursive-include awx *.po
 recursive-include awx *.mo
 recursive-include awx/static *
 recursive-include awx/templates *.html
-recursive-include awx/api/templates *.md *.html
+recursive-include awx/api/templates *.md *.html *.yml
 recursive-include awx/ui/build *.html
 recursive-include awx/ui/build *
 recursive-include awx/playbooks *.yml
@@ -12,7 +12,7 @@ recursive-include awx/plugins *.ps1
 recursive-include requirements *.txt
 recursive-include requirements *.yml
 recursive-include config *
-recursive-include docs/licenses *
+recursive-include licenses *
 recursive-exclude awx devonly.py*
 recursive-exclude awx/api/tests *
 recursive-exclude awx/main/tests *

Makefile
@@ -6,7 +6,20 @@ CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
-COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+# ansible-test requires semver compatable version, so we allow overrides to hack it
+COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+# args for the ansible-test sanity command
+COLLECTION_SANITY_ARGS ?= --docker
+# collection unit testing directories
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+# collection integration test directories (defaults to all)
+COLLECTION_TEST_TARGET ?=
+# args for collection install
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false

 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
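
Because the assignment becomes `?=`, the semver workaround can now be applied from the environment without editing the Makefile — the same override the sanity target below uses:

```bash
# Replace the git-derived version with one ansible-test accepts
COLLECTION_VERSION=1.0.0 make install_collection
```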
@@ -34,7 +47,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 setuptools_scm[toml]==6.4.2 wheel==0.36.2
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4

 NAME ?= awx
@@ -52,7 +65,7 @@ I18N_FLAG_FILE = .i18n_built
     sdist \
     ui-release ui-devel \
     VERSION PYTHON_VERSION docker-compose-sources \
-    .git/hooks/pre-commit
+    .git/hooks/pre-commit github_ci_setup github_ci_runner

 clean-tmp:
     rm -rf tmp/
@@ -72,7 +85,7 @@ clean-languages:
     rm -f $(I18N_FLAG_FILE)
     find ./awx/locale/ -type f -regex ".*\.mo$" -delete

-# Remove temporary build files, compiled Python files.
+## Remove temporary build files, compiled Python files.
 clean: clean-ui clean-api clean-awxkit clean-dist
     rm -rf awx/public
     rm -rf awx/lib/site-packages
@@ -85,6 +98,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist

 clean-api:
     rm -rf build $(NAME)-$(VERSION) *.egg-info
+    rm -rf .tox
     find . -type f -regex ".*\.py[co]$$" -delete
     find . -type d -name "__pycache__" -delete
     rm -f awx/awx_test.sqlite3*
@@ -94,7 +108,7 @@ clean-api:

 clean-awxkit:
     rm -rf awxkit/*.egg-info awxkit/.tox awxkit/build/*

-# convenience target to assert environment variables are defined
+## convenience target to assert environment variables are defined
 guard-%:
     @if [ "$${$*}" = "" ]; then \
         echo "The required environment variable '$*' is not set"; \
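
The `guard-%` pattern rule gives any target a cheap way to insist that a variable is set; a usage sketch (the variable name here is illustrative):

```bash
# Fails with "The required environment variable 'MY_VAR' is not set" when unset
make guard-MY_VAR
# Succeeds silently when the variable is provided
MY_VAR=value make guard-MY_VAR
```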
@@ -117,7 +131,7 @@ virtualenv_awx:
         fi; \
     fi

-# Install third-party requirements needed for AWX's environment.
+## Install third-party requirements needed for AWX's environment.
 # this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
     if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
@@ -136,7 +150,7 @@ requirements_dev: requirements_awx requirements_awx_dev

 requirements_test: requirements

-# "Install" awx package in development mode.
+## "Install" awx package in development mode.
 develop:
     @if [ "$(VIRTUAL_ENV)" ]; then \
         pip uninstall -y awx; \
@@ -153,21 +167,21 @@ version_file:
     fi; \
     $(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

-# Refresh development environment after pulling new code.
+## Refresh development environment after pulling new code.
 refresh: clean requirements_dev version_file develop migrate

-# Create Django superuser.
+## Create Django superuser.
 adduser:
     $(MANAGEMENT_COMMAND) createsuperuser

-# Create database tables and apply any new migrations.
+## Create database tables and apply any new migrations.
 migrate:
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
     $(MANAGEMENT_COMMAND) migrate --noinput

-# Run after making changes to the models to create a new migration.
+## Run after making changes to the models to create a new migration.
 dbchange:
     $(MANAGEMENT_COMMAND) makemigrations
@@ -181,7 +195,7 @@ collectstatic:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+    $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

 DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
@@ -218,7 +232,7 @@ wsbroadcast:
     fi; \
     $(PYTHON) manage.py run_wsbroadcast

-# Run to start the background task dispatcher for development.
+## Run to start the background task dispatcher for development.
 dispatcher:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -226,7 +240,7 @@ dispatcher:
     $(PYTHON) manage.py run_dispatcher

-# Run to start the zeromq callback receiver
+## Run to start the zeromq callback receiver
 receiver:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -278,7 +292,7 @@ awx-link:

 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
 PYTEST_ARGS ?= -n auto
-# Run all API unit tests.
+## Run all API unit tests.
 test:
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -287,19 +301,28 @@ test:
     cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
     awx-manage check_migrations --dry-run --check -n 'missing_migration_file'

-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-COLLECTION_TEST_TARGET ?=
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
+## Login to Github container image registry, pull image, then build image.
+github_ci_setup:
+    # GITHUB_ACTOR is automatic github actions env var
+    # CI_GITHUB_TOKEN is defined in .github files
+    echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
+    docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
+    make docker-compose-build

+## Runs AWX_DOCKER_CMD inside a new docker container.
+docker-runner:
+    docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)

+## Builds image and runs AWX_DOCKER_CMD in it, mainly for .github checks.
+github_ci_runner: github_ci_setup docker-runner

 test_collection:
     rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi && \
-    pip install ansible-core && \
+    if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+    ansible --version
     py.test $(COLLECTION_TEST_DIRS) -v
     # The python path needs to be modified so that the tests can find Ansible within the container
     # First we will use anything expility set as PYTHONPATH
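
Outside of CI, `docker-runner` is also handy for running one-off commands in a clean container against your checkout; a sketch, assuming `DEVEL_IMAGE_NAME` resolves to a development image you have already built or pulled:

```bash
# Run the unit test target inside the devel container instead of on the host
AWX_DOCKER_CMD='make test' make docker-runner
```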
@@ -329,8 +352,13 @@ install_collection: build_collection
     rm -rf $(COLLECTION_INSTALL)
     ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz

-test_collection_sanity: install_collection
-    cd $(COLLECTION_INSTALL) && ansible-test sanity
+test_collection_sanity:
+    rm -rf awx_collection_build/
+    rm -rf $(COLLECTION_INSTALL)
+    if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
+    ansible --version
+    COLLECTION_VERSION=1.0.0 make install_collection
+    cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)

 test_collection_integration: install_collection
     cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
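
With the arguments factored into `COLLECTION_SANITY_ARGS`, hosts without Docker can switch ansible-test to another isolation mode; a sketch — `--venv` is a standard ansible-test option rather than anything this Makefile defines:

```bash
COLLECTION_SANITY_ARGS=--venv make test_collection_sanity
```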
@@ -341,23 +369,24 @@ test_unit:
     fi; \
     py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit

-# Run all API unit tests with coverage enabled.
+## Run all API unit tests with coverage enabled.
 test_coverage:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
     py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)

-# Output test coverage as HTML (into htmlcov directory).
+## Output test coverage as HTML (into htmlcov directory).
 coverage_html:
     coverage html

-# Run API unit tests across multiple Python/Django versions with Tox.
+## Run API unit tests across multiple Python/Django versions with Tox.
 test_tox:
     tox -v

-# Make fake data
 DATA_GEN_PRESET = ""
+## Make fake data
 bulk_data:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -376,28 +405,29 @@ clean-ui:
     rm -rf awx/ui/build
     rm -rf awx/ui/src/locales/_build
     rm -rf $(UI_BUILD_FLAG_FILE)
+    # the collectstatic command doesn't like it if this dir doesn't exist.
+    mkdir -p awx/ui/build/static

 awx/ui/node_modules:
-    NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn ci
+    NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci

-$(UI_BUILD_FLAG_FILE): awx/ui/node_modules
+$(UI_BUILD_FLAG_FILE):
+    $(MAKE) awx/ui/node_modules
     $(PYTHON) tools/scripts/compilemessages.py
     $(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
     $(NPM_BIN) --prefix awx/ui --loglevel warn run build
-    mkdir -p awx/public/static/css
-    mkdir -p awx/public/static/js
-    mkdir -p awx/public/static/media
-    cp -r awx/ui/build/static/css/* awx/public/static/css
-    cp -r awx/ui/build/static/js/* awx/public/static/js
-    cp -r awx/ui/build/static/media/* awx/public/static/media
     touch $@

 ui-release: $(UI_BUILD_FLAG_FILE)

 ui-devel: awx/ui/node_modules
     @$(MAKE) -B $(UI_BUILD_FLAG_FILE)
+    mkdir -p /var/lib/awx/public/static/css
+    mkdir -p /var/lib/awx/public/static/js
+    mkdir -p /var/lib/awx/public/static/media
+    cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
+    cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
+    cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media

 ui-devel-instrumented: awx/ui/node_modules
     $(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -449,12 +479,18 @@ awx/projects:
 COMPOSE_UP_OPTS ?=
 COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
-EXECUTION_NODE_COUNT ?= 2
+EXECUTION_NODE_COUNT ?= 0
 MINIKUBE_CONTAINER_GROUP ?= false
+MINIKUBE_SETUP ?= false # if false, run minikube separately
+EXTRA_SOURCES_ANSIBLE_OPTS ?=

+ifneq ($(ADMIN_PASSWORD),)
+EXTRA_SOURCES_ANSIBLE_OPTS := -e admin_password=$(ADMIN_PASSWORD) $(EXTRA_SOURCES_ANSIBLE_OPTS)
+endif

 docker-compose-sources: .git/hooks/pre-commit
     @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-        ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+        ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
     fi;

     ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
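
The `ifneq` block above means a fixed admin password can be injected when the compose sources are generated; a usage sketch (the password value is illustrative):

```bash
ADMIN_PASSWORD=mypassword make docker-compose
```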
@@ -468,7 +504,8 @@ docker-compose-sources: .git/hooks/pre-commit
         -e enable_ldap=$(LDAP) \
         -e enable_splunk=$(SPLUNK) \
         -e enable_prometheus=$(PROMETHEUS) \
-        -e enable_grafana=$(GRAFANA)
+        -e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
@@ -502,7 +539,7 @@ docker-compose-container-group-clean:
     fi
     rm -rf tools/docker-compose-minikube/_sources/

-# Base development image build
+## Base development image build
 docker-compose-build:
     ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
     DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
@@ -520,7 +557,7 @@ docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean

 docker-refresh: docker-clean docker-compose

-# Docker Development Environment with Elastic Stack Connected
+## Docker Development Environment with Elastic Stack Connected
 docker-compose-elk: awx/projects docker-compose-sources
     docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
@@ -557,31 +594,73 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
     -e template_dest=_build_kube_dev \
     -e receptor_image=$(RECEPTOR_IMAGE)

+## Build awx_kube_devel image for development on local Kubernetes environment.
 awx-kube-dev-build: Dockerfile.kube-dev
     DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
         --build-arg BUILDKIT_INLINE_CACHE=1 \
         --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
         -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .

+## Build awx image for deployment on Kubernetes environment.
+awx-kube-build: Dockerfile
+    DOCKER_BUILDKIT=1 docker build -f Dockerfile \
+        --build-arg VERSION=$(VERSION) \
+        --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+        --build-arg HEADLESS=$(HEADLESS) \
+        -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .

 # Translation TASKS
 # --------------------------------------

-# generate UI .pot file, an empty template of strings yet to be translated
+## generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
     $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean

-# generate UI .po files for each locale (will update translated strings for `en`)
+## generate UI .po files for each locale (will update translated strings for `en`)
 po: $(UI_BUILD_FLAG_FILE)
     $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean

-# generate API django .pot .po
-LANG = "en-us"
+## generate API django .pot .po
 messages:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
+    $(PYTHON) manage.py makemessages -l en_us --keep-pot

 print-%:
     @echo $($*)

+# HELP related targets
+# --------------------------------------

+HELP_FILTER=.PHONY

+## Display help targets
+help:
+    @printf "Available targets:\n"
+    @make -s help/generate | grep -vE "\w($(HELP_FILTER))"

+## Display help for all targets
+help/all:
+    @printf "Available targets:\n"
+    @make -s help/generate

+## Generate help output from MAKEFILE_LIST
+help/generate:
+    @awk '/^[-a-zA-Z_0-9%:\\\.\/]+:/ { \
+        helpMessage = match(lastLine, /^## (.*)/); \
+        if (helpMessage) { \
+            helpCommand = $$1; \
+            helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
+            gsub("\\\\", "", helpCommand); \
+            gsub(":+$$", "", helpCommand); \
+            printf " \x1b[32;01m%-35s\x1b[0m %s\n", helpCommand, helpMessage; \
+        } else { \
+            helpCommand = $$1; \
+            gsub("\\\\", "", helpCommand); \
+            gsub(":+$$", "", helpCommand); \
+            printf " \x1b[32;01m%-35s\x1b[0m %s\n", helpCommand, "No help available"; \
+        } \
+    } \
+    { lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
+    @printf "\n"
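
The `#` to `##` comment changes throughout this Makefile feed the awk script above: only a `##` line immediately preceding a target becomes its help text. Expected use:

```bash
make help       # documented targets, minus anything matching HELP_FILTER
make help/all   # every target, with "No help available" placeholders
```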

@@ -67,7 +67,6 @@ else:
     from django.db import connection

 if HAS_DJANGO is True:
-
     # See upgrade blocker note in requirements/README.md
     try:
         names_digest('foo', 'bar', 'baz', length=8)
@@ -190,7 +189,7 @@ def manage():
         sys.stdout.write('%s\n' % __version__)
     # If running as a user without permission to read settings, display an
     # error message. Allow --help to still work.
-    elif settings.SECRET_KEY == 'permission-denied':
+    elif not os.getenv('SKIP_SECRET_KEY_CHECK', False) and settings.SECRET_KEY == 'permission-denied':
         if len(sys.argv) == 1 or len(sys.argv) >= 2 and sys.argv[1] in ('-h', '--help', 'help'):
             execute_from_command_line(sys.argv)
         sys.stdout.write('\n')
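
Since `os.getenv` returns the raw string here, any non-empty value disables the check; a sketch of how that might be exercised in an environment where the settings file is otherwise unreadable (the exact command is illustrative):

```bash
# Any non-empty value works; empty string leaves the check in place
SKIP_SECRET_KEY_CHECK=1 awx-manage --help
```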

@@ -96,6 +96,15 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
+register(
+    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
+    field_class=fields.BooleanField,
+    default=False,
+    label=_('Allow anonymous users to poll metrics'),
+    help_text=_('If true, anonymous users are allowed to poll metrics.'),
+    category=_('Authentication'),
+    category_slug='authentication',
+)

 def authentication_validate(serializer, attrs):
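
Once the setting is flipped to true, the metrics endpoint can be polled without credentials; a request sketch — the host and port assume a default local docker-compose deployment, and `/api/v2/metrics/` is the endpoint this setting gates:

```bash
# -k tolerates the self-signed certificate in the dev environment
curl -k https://localhost:8043/api/v2/metrics/
```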

@@ -80,7 +80,6 @@ class VerbatimField(serializers.Field):

 class OAuth2ProviderField(fields.DictField):
-
     default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
     valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
     child = fields.IntegerField(min_value=1)
@@ -157,10 +157,9 @@ class FieldLookupBackend(BaseFilterBackend):

     # A list of fields that we know can be filtered on without the possiblity
     # of introducing duplicates
-    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField)
+    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

     def get_fields_from_lookup(self, model, lookup):
-
         if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
             path, suffix = lookup.rsplit('__', 1)
         else:
@@ -232,6 +231,9 @@ class FieldLookupBackend(BaseFilterBackend):
                 re.compile(value)
             except re.error as e:
                 raise ValueError(e.args[0])
+        elif new_lookup.endswith('__iexact'):
+            if not isinstance(field, (CharField, TextField)):
+                raise ValueError(f'{field.name} is not a text field and cannot be filtered by case-insensitive search')
         elif new_lookup.endswith('__search'):
             related_model = getattr(field, 'related_model', None)
             if not related_model:
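
From the API consumer's side, the new branch means `__iexact` is accepted on character and text columns and rejected elsewhere with the error above; a request sketch (host, credentials, and the template name are all illustrative):

```bash
curl -ksu admin:password 'https://localhost:8043/api/v2/job_templates/?name__iexact=demo%20job%20template'
```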
@@ -258,8 +260,8 @@ class FieldLookupBackend(BaseFilterBackend):
         search_filters = {}
         needs_distinct = False
         # Can only have two values: 'AND', 'OR'
-        # If 'AND' is used, an iterm must satisfy all condition to show up in the results.
-        # If 'OR' is used, an item just need to satisfy one condition to appear in results.
+        # If 'AND' is used, an item must satisfy all conditions to show up in the results.
+        # If 'OR' is used, an item just needs to satisfy one condition to appear in results.
         search_filter_relation = 'OR'
         for key, values in request.query_params.lists():
             if key in self.RESERVED_NAMES:

@@ -6,7 +6,6 @@ import inspect
 import logging
 import time
 import uuid
-import urllib.parse

 # Django
 from django.conf import settings
@@ -14,7 +13,7 @@ from django.contrib.auth import views as auth_views
 from django.contrib.contenttypes.models import ContentType
 from django.core.cache import cache
 from django.core.exceptions import FieldDoesNotExist
-from django.db import connection
+from django.db import connection, transaction
 from django.db.models.fields.related import OneToOneRel
 from django.http import QueryDict
 from django.shortcuts import get_object_or_404
@@ -30,7 +29,7 @@ from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
 from rest_framework.permissions import AllowAny
-from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
+from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

 # AWX
@@ -41,7 +40,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
-from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 from awx.conf import settings_registry
@@ -63,9 +62,9 @@ __all__ = [
     'SubDetailAPIView',
     'ResourceAccessList',
     'ParentMixin',
-    'DeleteLastUnattachLabelMixin',
     'SubListAttachDetachAPIView',
     'CopyAPIView',
+    'GenericCancelView',
     'BaseUsersList',
 ]

@@ -91,14 +90,9 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
-        current_user = getattr(request, 'user', None)
         if request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
             ret.set_cookie('userLoggedIn', 'true')
-            current_user = UserSerializer(self.request.user)
-            current_user = smart_str(JSONRenderer().render(current_user.data))
-            current_user = urllib.parse.quote('%s' % current_user, '')
-            ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

         return ret
@@ -141,7 +135,6 @@ def get_default_schema():

 class APIView(views.APIView):
-
     schema = get_default_schema()
     versioning_class = URLPathVersioning

@@ -255,7 +248,7 @@ class APIView(views.APIView):
             response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

         if getattr(self, 'deprecated', False):
-            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
+            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'

         return response
@@ -775,28 +768,6 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
         return {'id': None}

-class DeleteLastUnattachLabelMixin(object):
-    """
-    Models for which you want the last instance to be deleted from the database
-    when the last disassociate is called should inherit from this class. Further,
-    the model should implement is_detached()
-    """
-
-    def unattach(self, request, *args, **kwargs):
-        (sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
-        if res:
-            return res
-
-        res = super(DeleteLastUnattachLabelMixin, self).unattach_by_id(request, sub_id)
-
-        obj = self.model.objects.get(id=sub_id)
-
-        if obj.is_detached():
-            obj.delete()
-
-        return res

 class SubDetailAPIView(ParentMixin, generics.RetrieveAPIView, GenericAPIView):
     pass

@@ -828,7 +799,6 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):

 class ResourceAccessList(ParentMixin, ListAPIView):
-
     serializer_class = ResourceAccessListElementSerializer
     ordering = ('username',)

@@ -851,7 +821,6 @@ def trigger_delayed_deep_copy(*args, **kwargs):

 class CopyAPIView(GenericAPIView):
-
     serializer_class = CopySerializer
     permission_classes = (AllowAny,)
     copy_return_serializer_class = None
@@ -1014,6 +983,23 @@ class CopyAPIView(GenericAPIView):
         return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

+class GenericCancelView(RetrieveAPIView):
+    # In subclass set model, serializer_class
+    obj_permission_type = 'cancel'
+
+    @transaction.non_atomic_requests
+    def dispatch(self, *args, **kwargs):
+        return super(GenericCancelView, self).dispatch(*args, **kwargs)
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        if obj.can_cancel:
+            obj.cancel()
+            return Response(status=status.HTTP_202_ACCEPTED)
+        else:
+            return self.http_method_not_allowed(request, *args, **kwargs)

 class BaseUsersList(SubListCreateAttachDetachAPIView):
     def post(self, request, *args, **kwargs):
         ret = super(BaseUsersList, self).post(request, *args, **kwargs)
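
Subclasses of this view expose cancellation as a POST to the resource's cancel endpoint, answering 202 when `can_cancel` is true and 405 otherwise; a request sketch (host, credentials, and job id are illustrative):

```bash
curl -ksu admin:password -X POST https://localhost:8043/api/v2/jobs/42/cancel/
```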

@@ -128,7 +128,7 @@ class Metadata(metadata.SimpleMetadata):
         # Special handling of notification configuration where the required properties
         # are conditional on the type selected.
         if field.field_name == 'notification_configuration':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.init_parameters

         # Special handling of notification messages where the required properties
@@ -138,7 +138,7 @@ class Metadata(metadata.SimpleMetadata):
         except (AttributeError, KeyError):
             view_model = None
         if view_model == NotificationTemplate and field.field_name == 'messages':
-            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
+            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.default_messages

         # Update type of fields returned...

@@ -24,7 +24,6 @@ class DisabledPaginator(DjangoPaginator):

 class Pagination(pagination.PageNumberPagination):
-
     page_size_query_param = 'page_size'
     max_page_size = settings.MAX_PAGE_SIZE
     count_disabled = False

@@ -24,7 +24,6 @@ __all__ = [
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
     'IsSystemAdminOrAuditor',
-    'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]

@@ -22,7 +22,6 @@ class SurrogateEncoder(encoders.JSONEncoder):

 class DefaultJSONRenderer(renderers.JSONRenderer):
-
     encoder_class = SurrogateEncoder

@@ -95,7 +94,6 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):

 class PlainTextRenderer(renderers.BaseRenderer):
-
     media_type = 'text/plain'
     format = 'txt'

@@ -106,18 +104,15 @@ class PlainTextRenderer(renderers.BaseRenderer):

 class DownloadTextRenderer(PlainTextRenderer):
-
     format = "txt_download"

 class AnsiTextRenderer(PlainTextRenderer):
-
     media_type = 'text/plain'
     format = 'ansi'

 class AnsiDownloadRenderer(PlainTextRenderer):
-
     format = "ansi_download"

@@ -29,7 +29,7 @@ from django.utils.translation import gettext_lazy as _
 from django.utils.encoding import force_str
 from django.utils.text import capfirst
 from django.utils.timezone import now
-from django.utils.functional import cached_property
+from django.core.validators import RegexValidator, MaxLengthValidator

 # Django REST Framework
 from rest_framework.exceptions import ValidationError, PermissionDenied
@@ -113,7 +113,7 @@ from awx.main.utils import (
 )
 from awx.main.utils.filters import SmartFilter
 from awx.main.utils.named_url_graph import reset_counters
-from awx.main.scheduler.task_manager_models import TaskManagerInstanceGroups, TaskManagerInstances
+from awx.main.scheduler.task_manager_models import TaskManagerModels
 from awx.main.redact import UriCleaner, REPLACE_STR

 from awx.main.validators import vars_validate_or_raise
@@ -121,6 +121,9 @@ from awx.main.validators import vars_validate_or_raise
 from awx.api.versioning import reverse
 from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField

+# AWX Utils
+from awx.api.validators import HostnameRegexValidator

 logger = logging.getLogger('awx.api.serializers')

 # Fields that should be summarized regardless of object type.
@@ -155,6 +158,7 @@ SUMMARIZABLE_FK_FIELDS = {
     'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
     'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed'),
     'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
+    'signature_validation_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'credential_type_id'),
     'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type', 'canceled_on'),
     'job_template': DEFAULT_SUMMARY_FIELDS,
     'workflow_job_template': DEFAULT_SUMMARY_FIELDS,
@@ -196,7 +200,6 @@ def reverse_gfk(content_object, request):

 class CopySerializer(serializers.Serializer):
-
     name = serializers.CharField()

     def validate(self, attrs):
@@ -428,7 +431,6 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
                 continue
             summary_fields[fk] = OrderedDict()
             for field in related_fields:
-
                 fval = getattr(fkval, field, None)

                 if fval is None and field == 'type':
@@ -615,7 +617,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
     def validate(self, attrs):
         attrs = super(BaseSerializer, self).validate(attrs)
         try:
-            # Create/update a model instance and run it's full_clean() method to
+            # Create/update a model instance and run its full_clean() method to
             # do any validation implemented on the model class.
             exclusions = self.get_validation_exclusions(self.instance)
             obj = self.instance or self.Meta.model()
@@ -926,7 +928,6 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):

 class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
-
     result_stdout = serializers.SerializerMethodField()

     class Meta:
@@ -940,7 +941,6 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):

 class UserSerializer(BaseSerializer):
-
     password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
     ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
     external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
@@ -1100,7 +1100,6 @@ class UserActivityStreamSerializer(UserSerializer):

 class BaseOAuth2TokenSerializer(BaseSerializer):
-
     refresh_token = serializers.SerializerMethodField()
     token = serializers.SerializerMethodField()
     ALLOWED_SCOPES = ['read', 'write']
@@ -1218,7 +1217,6 @@ class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):

 class OAuth2ApplicationSerializer(BaseSerializer):
-
     show_capabilities = ['edit', 'delete']

     class Meta:
@@ -1453,7 +1451,6 @@ class ExecutionEnvironmentSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
|
class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
|
||||||
|
|
||||||
status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True)
|
status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True)
|
||||||
last_update_failed = serializers.BooleanField(read_only=True)
|
last_update_failed = serializers.BooleanField(read_only=True)
|
||||||
last_updated = serializers.DateTimeField(read_only=True)
|
last_updated = serializers.DateTimeField(read_only=True)
|
||||||
@@ -1471,6 +1468,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
             'allow_override',
             'custom_virtualenv',
             'default_environment',
+            'signature_validation_credential',
         ) + (
             'last_update_failed',
             'last_updated',
@@ -1543,7 +1541,6 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
-

 class ProjectPlaybooksSerializer(ProjectSerializer):

     playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.'))

     class Meta:
@@ -1561,7 +1558,6 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
-

 class ProjectInventoriesSerializer(ProjectSerializer):

     inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))

     class Meta:
@@ -1576,7 +1572,6 @@ class ProjectInventoriesSerializer(ProjectSerializer):
-

 class ProjectUpdateViewSerializer(ProjectSerializer):

     can_update = serializers.BooleanField(read_only=True)

     class Meta:
@@ -1606,7 +1601,6 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
-

 class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):

     playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))

     class Meta:
@@ -1629,7 +1623,6 @@ class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSeriali
-

 class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -1679,6 +1672,7 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
             'total_inventory_sources',
             'inventory_sources_with_failures',
             'pending_deletion',
+            'prevent_instance_group_fallback',
         )

     def get_related(self, obj):
@@ -1966,7 +1960,6 @@ class GroupSerializer(BaseSerializerWithVariables):
-

 class GroupTreeSerializer(GroupSerializer):

     children = serializers.SerializerMethodField()

     class Meta:
@@ -2064,7 +2057,6 @@ class InventorySourceOptionsSerializer(BaseSerializer):
-

 class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer):

     status = serializers.ChoiceField(choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True)
     last_update_failed = serializers.BooleanField(read_only=True)
     last_updated = serializers.DateTimeField(read_only=True)
@@ -2209,15 +2201,22 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt


 class InventorySourceUpdateSerializer(InventorySourceSerializer):

     can_update = serializers.BooleanField(read_only=True)

     class Meta:
         fields = ('can_update',)

+    def validate(self, attrs):
+        project = self.instance.source_project
+        if project:
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                raise serializers.ValidationError(failed_reason)
+
+        return super(InventorySourceUpdateSerializer, self).validate(attrs)


 class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

     custom_virtualenv = serializers.ReadOnlyField()

     class Meta:
@@ -2231,6 +2230,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
             'source_project_update',
             'custom_virtualenv',
             'instance_group',
+            'scm_revision',
         )

     def get_related(self, obj):
@@ -2257,7 +2257,6 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
-

 class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):

     source_project = serializers.SerializerMethodField(help_text=_('The project used for this job.'), method_name='get_source_project_id')

     class Meta:
@@ -2308,7 +2307,6 @@ class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSer
-

 class InventoryUpdateCancelSerializer(InventoryUpdateSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -2666,7 +2664,6 @@ class CredentialSerializer(BaseSerializer):
-

 class CredentialSerializerCreate(CredentialSerializer):

     user = serializers.PrimaryKeyRelatedField(
         queryset=User.objects.all(),
         required=False,
@@ -2921,6 +2918,12 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
             'ask_verbosity_on_launch',
             'ask_inventory_on_launch',
             'ask_credential_on_launch',
+            'ask_execution_environment_on_launch',
+            'ask_labels_on_launch',
+            'ask_forks_on_launch',
+            'ask_job_slice_count_on_launch',
+            'ask_timeout_on_launch',
+            'ask_instance_groups_on_launch',
             'survey_enabled',
             'become_enabled',
             'diff_mode',
@@ -2929,6 +2932,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
             'job_slice_count',
             'webhook_service',
             'webhook_credential',
+            'prevent_instance_group_fallback',
         )
         read_only_fields = ('*', 'custom_virtualenv')

@@ -3014,7 +3018,6 @@ class JobTemplateWithSpecSerializer(JobTemplateSerializer):
-

 class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):

     passwords_needed_to_start = serializers.ReadOnlyField()
     artifacts = serializers.SerializerMethodField()

@@ -3097,7 +3100,6 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
-

 class JobDetailSerializer(JobSerializer):

     playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
     custom_virtualenv = serializers.ReadOnlyField()

@@ -3115,7 +3117,6 @@ class JobDetailSerializer(JobSerializer):
-

 class JobCancelSerializer(BaseSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -3124,7 +3125,6 @@ class JobCancelSerializer(BaseSerializer):
-

 class JobRelaunchSerializer(BaseSerializer):

     passwords_needed_to_start = serializers.SerializerMethodField()
     retry_counts = serializers.SerializerMethodField()
     hosts = serializers.ChoiceField(
@@ -3183,8 +3183,7 @@ class JobRelaunchSerializer(BaseSerializer):
         return attrs

-
-class JobCreateScheduleSerializer(BaseSerializer):
+class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer):

     can_schedule = serializers.SerializerMethodField()
     prompts = serializers.SerializerMethodField()

@@ -3209,14 +3208,17 @@ class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer):
         try:
             config = obj.launch_config
             ret = config.prompts_dict(display=True)
-            if 'inventory' in ret:
-                ret['inventory'] = self._summarize('inventory', ret['inventory'])
-            if 'credentials' in ret:
-                all_creds = [self._summarize('credential', cred) for cred in ret['credentials']]
-                ret['credentials'] = all_creds
+            for field_name in ('inventory', 'execution_environment'):
+                if field_name in ret:
+                    ret[field_name] = self._summarize(field_name, ret[field_name])
+            for field_name, singular in (('credentials', 'credential'), ('instance_groups', 'instance_group')):
+                if field_name in ret:
+                    ret[field_name] = [self._summarize(singular, obj) for obj in ret[field_name]]
+            if 'labels' in ret:
+                ret['labels'] = self._summary_field_labels(config)
             return ret
         except JobLaunchConfig.DoesNotExist:
-            return {'all': _('Unknown, job may have been ran before launch configurations were saved.')}
+            return {'all': _('Unknown, job may have been run before launch configurations were saved.')}


 class AdHocCommandSerializer(UnifiedJobSerializer):
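The prompts summary above generalizes the old inventory/credentials special cases to any promptable related object. As a hedged illustration (the values are hypothetical, not taken from the commit), the summarized dict might look like:

    prompts = {
        'inventory': {'id': 1, 'name': 'Demo Inventory'},           # _summarize on a single object
        'execution_environment': {'id': 2, 'name': 'Default EE'},
        'credentials': [{'id': 3, 'name': 'Machine Credential'}],   # list prompts summarized element by element
        'instance_groups': [{'id': 1, 'name': 'default'}],
        'labels': [{'id': 4, 'name': 'nightly'}],                   # via _summary_field_labels
    }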
@@ -3307,7 +3309,6 @@ class AdHocCommandDetailSerializer(AdHocCommandSerializer):
-

 class AdHocCommandCancelSerializer(AdHocCommandSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -3346,7 +3347,6 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):
-

 class SystemJobSerializer(UnifiedJobSerializer):

     result_stdout = serializers.SerializerMethodField()

     class Meta:
@@ -3373,7 +3373,6 @@ class SystemJobSerializer(UnifiedJobSerializer):
-

 class SystemJobCancelSerializer(SystemJobSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -3386,6 +3385,9 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
     limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)

+    skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+    job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+
     class Meta:
         model = WorkflowJobTemplate
         fields = (
@@ -3404,6 +3406,11 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
             'webhook_service',
             'webhook_credential',
             '-execution_environment',
+            'ask_labels_on_launch',
+            'ask_skip_tags_on_launch',
+            'ask_tags_on_launch',
+            'skip_tags',
+            'job_tags',
         )

     def get_related(self, obj):
@@ -3447,7 +3454,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo

         # process char_prompts, these are not direct fields on the model
         mock_obj = self.Meta.model()
-        for field_name in ('scm_branch', 'limit'):
+        for field_name in ('scm_branch', 'limit', 'skip_tags', 'job_tags'):
             if field_name in attrs:
                 setattr(mock_obj, field_name, attrs[field_name])
                 attrs.pop(field_name)
@@ -3473,6 +3480,9 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
     limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)

+    skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+    job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+
     class Meta:
         model = WorkflowJob
         fields = (
@@ -3492,6 +3502,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
             'webhook_service',
             'webhook_credential',
             'webhook_guid',
+            'skip_tags',
+            'job_tags',
         )

     def get_related(self, obj):
@@ -3525,7 +3537,6 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer)
-

 class WorkflowJobCancelSerializer(WorkflowJobSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
@@ -3539,7 +3550,6 @@ class WorkflowApprovalViewSerializer(UnifiedJobSerializer):
-

 class WorkflowApprovalSerializer(UnifiedJobSerializer):

     can_approve_or_deny = serializers.SerializerMethodField()
     approval_expiration = serializers.SerializerMethodField()
     timed_out = serializers.ReadOnlyField()
@@ -3608,6 +3618,9 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
     skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None)
     verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES)
+    forks = serializers.IntegerField(required=False, allow_null=True, min_value=0, default=None)
+    job_slice_count = serializers.IntegerField(required=False, allow_null=True, min_value=0, default=None)
+    timeout = serializers.IntegerField(required=False, allow_null=True, default=None)
     exclude_errors = ()

     class Meta:
@@ -3623,13 +3636,21 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             'skip_tags',
             'diff_mode',
             'verbosity',
+            'execution_environment',
+            'forks',
+            'job_slice_count',
+            'timeout',
         )

     def get_related(self, obj):
         res = super(LaunchConfigurationBaseSerializer, self).get_related(obj)
         if obj.inventory_id:
             res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory_id})
+        if obj.execution_environment_id:
+            res['execution_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.execution_environment_id})
+        res['labels'] = self.reverse('api:{}_labels_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
         res['credentials'] = self.reverse('api:{}_credentials_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
+        res['instance_groups'] = self.reverse('api:{}_instance_groups_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
         return res

     def _build_mock_obj(self, attrs):
@@ -3709,7 +3730,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):

         # Build unsaved version of this config, use it to detect prompts errors
         mock_obj = self._build_mock_obj(attrs)
-        accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
+        if set(list(ujt.get_ask_mapping().keys()) + ['extra_data']) & set(attrs.keys()):
+            accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
+        else:
+            # Only perform validation of prompts if prompts fields are provided
+            errors = {}

         # Remove all unprocessed $encrypted$ strings, indicating default usage
         if 'extra_data' in attrs and password_dict:
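The new guard only exercises prompt validation when the caller actually supplied promptable fields. A minimal sketch of the same set-intersection test, with a made-up ask mapping:

    # Hypothetical values for illustration only
    ask_mapping_keys = ['inventory', 'credentials', 'limit', 'scm_branch']
    attrs = {'name': 'nightly-schedule', 'rrule': 'DTSTART:20220101T000000Z'}

    if set(ask_mapping_keys + ['extra_data']) & set(attrs.keys()):
        print('validate prompts against the template')
    else:
        print('skip prompt validation')  # taken here: attrs carries no prompt fields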
@@ -3919,7 +3944,6 @@ class JobHostSummarySerializer(BaseSerializer):
-

 class JobEventSerializer(BaseSerializer):

     event_display = serializers.CharField(source='get_event_display2', read_only=True)
     event_level = serializers.IntegerField(read_only=True)

@@ -4015,7 +4039,6 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
-

 class AdHocCommandEventSerializer(BaseSerializer):

     event_display = serializers.CharField(source='get_event_display', read_only=True)

     class Meta:
@@ -4081,7 +4104,6 @@ class SystemJobEventSerializer(AdHocCommandEventSerializer):
-

 class JobLaunchSerializer(BaseSerializer):

     # Representational fields
     passwords_needed_to_start = serializers.ReadOnlyField()
     can_start_without_user_input = serializers.BooleanField(read_only=True)
@@ -4104,6 +4126,12 @@ class JobLaunchSerializer(BaseSerializer):
     skip_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
     limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
     verbosity = serializers.ChoiceField(required=False, choices=VERBOSITY_CHOICES, write_only=True)
+    execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False, write_only=True)
+    labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False, write_only=True)
+    forks = serializers.IntegerField(required=False, write_only=True, min_value=0)
+    job_slice_count = serializers.IntegerField(required=False, write_only=True, min_value=0)
+    timeout = serializers.IntegerField(required=False, write_only=True)
+    instance_groups = serializers.PrimaryKeyRelatedField(many=True, queryset=InstanceGroup.objects.all(), required=False, write_only=True)

     class Meta:
         model = JobTemplate
@@ -4131,6 +4159,12 @@ class JobLaunchSerializer(BaseSerializer):
             'ask_verbosity_on_launch',
             'ask_inventory_on_launch',
             'ask_credential_on_launch',
+            'ask_execution_environment_on_launch',
+            'ask_labels_on_launch',
+            'ask_forks_on_launch',
+            'ask_job_slice_count_on_launch',
+            'ask_timeout_on_launch',
+            'ask_instance_groups_on_launch',
             'survey_enabled',
             'variables_needed_to_start',
             'credential_needed_to_start',
@@ -4138,6 +4172,12 @@ class JobLaunchSerializer(BaseSerializer):
             'job_template_data',
             'defaults',
             'verbosity',
+            'execution_environment',
+            'labels',
+            'forks',
+            'job_slice_count',
+            'timeout',
+            'instance_groups',
         )
         read_only_fields = (
             'ask_scm_branch_on_launch',
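Together these hunks let a launch request prompt for an execution environment, labels, forks, slice count, timeout, and instance groups, provided the matching ask_*_on_launch flag is set on the template. A hedged sketch of such a request (host, credentials, and primary keys are all hypothetical):

    import requests

    payload = {
        'extra_vars': {'version': '1.2.3'},
        'execution_environment': 5,   # pk; honored only with ask_execution_environment_on_launch
        'labels': [1, 4],             # pks; honored only with ask_labels_on_launch
        'forks': 10,
        'job_slice_count': 2,
        'timeout': 600,
        'instance_groups': [2],       # pks; honored only with ask_instance_groups_on_launch
    }
    resp = requests.post(
        'https://awx.example.org/api/v2/job_templates/42/launch/',
        json=payload,
        auth=('admin', 'secret'),
    )
    print(resp.status_code, resp.json().get('job'))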
@@ -4150,6 +4190,12 @@ class JobLaunchSerializer(BaseSerializer):
             'ask_verbosity_on_launch',
             'ask_inventory_on_launch',
             'ask_credential_on_launch',
+            'ask_execution_environment_on_launch',
+            'ask_labels_on_launch',
+            'ask_forks_on_launch',
+            'ask_job_slice_count_on_launch',
+            'ask_timeout_on_launch',
+            'ask_instance_groups_on_launch',
         )

     def get_credential_needed_to_start(self, obj):
@@ -4174,6 +4220,17 @@ class JobLaunchSerializer(BaseSerializer):
                     if cred.credential_type.managed and 'vault_id' in cred.credential_type.defined_fields:
                         cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
                     defaults_dict.setdefault(field_name, []).append(cred_dict)
+            elif field_name == 'execution_environment':
+                if obj.execution_environment_id:
+                    defaults_dict[field_name] = {'id': obj.execution_environment.id, 'name': obj.execution_environment.name}
+                else:
+                    defaults_dict[field_name] = {}
+            elif field_name == 'labels':
+                for label in obj.labels.all():
+                    label_dict = {'id': label.id, 'name': label.name}
+                    defaults_dict.setdefault(field_name, []).append(label_dict)
+            elif field_name == 'instance_groups':
+                defaults_dict[field_name] = []
             else:
                 defaults_dict[field_name] = getattr(obj, field_name)
         return defaults_dict
@@ -4193,8 +4250,10 @@ class JobLaunchSerializer(BaseSerializer):
         # Basic validation - cannot run a playbook without a playbook
         if not template.project:
             errors['project'] = _("A project is required to run a job.")
-        elif template.project.status in ('error', 'failed'):
-            errors['playbook'] = _("Missing a revision to run due to failed project update.")
+        else:
+            failure_reason = template.project.get_reason_if_failed()
+            if failure_reason:
+                errors['playbook'] = failure_reason

         # cannot run a playbook without an inventory
         if template.inventory and template.inventory.pending_deletion is True:
@@ -4261,7 +4320,6 @@ class JobLaunchSerializer(BaseSerializer):
-

 class WorkflowJobLaunchSerializer(BaseSerializer):

     can_start_without_user_input = serializers.BooleanField(read_only=True)
     defaults = serializers.SerializerMethodField()
     variables_needed_to_start = serializers.ReadOnlyField()
@@ -4272,6 +4330,10 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
     scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
     workflow_job_template_data = serializers.SerializerMethodField()

+    labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False, write_only=True)
+    skip_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
+    job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
+
     class Meta:
         model = WorkflowJobTemplate
         fields = (
@@ -4291,8 +4353,22 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
             'workflow_job_template_data',
             'survey_enabled',
             'ask_variables_on_launch',
+            'ask_labels_on_launch',
+            'labels',
+            'ask_skip_tags_on_launch',
+            'ask_tags_on_launch',
+            'skip_tags',
+            'job_tags',
+        )
+        read_only_fields = (
+            'ask_inventory_on_launch',
+            'ask_variables_on_launch',
+            'ask_skip_tags_on_launch',
+            'ask_labels_on_launch',
+            'ask_limit_on_launch',
+            'ask_scm_branch_on_launch',
+            'ask_tags_on_launch',
         )
-        read_only_fields = ('ask_inventory_on_launch', 'ask_variables_on_launch')

     def get_survey_enabled(self, obj):
         if obj:
@@ -4304,6 +4380,10 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
         for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
             if field_name == 'inventory':
                 defaults_dict[field_name] = dict(name=getattrd(obj, '%s.name' % field_name, None), id=getattrd(obj, '%s.pk' % field_name, None))
+            elif field_name == 'labels':
+                for label in obj.labels.all():
+                    label_dict = {"id": label.id, "name": label.name}
+                    defaults_dict.setdefault(field_name, []).append(label_dict)
             else:
                 defaults_dict[field_name] = getattr(obj, field_name)
         return defaults_dict
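The workflow launch serializer grows the matching prompts. A hedged example of launching a workflow job template with labels and tag overrides (IDs and host hypothetical):

    import requests

    resp = requests.post(
        'https://awx.example.org/api/v2/workflow_job_templates/7/launch/',
        json={
            'labels': [1, 4],        # honored only with ask_labels_on_launch
            'skip_tags': 'cleanup',  # honored only with ask_skip_tags_on_launch
            'job_tags': 'deploy',    # honored only with ask_tags_on_launch
        },
        auth=('admin', 'secret'),
    )
    print(resp.status_code)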
@@ -4329,6 +4409,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
         WFJT_inventory = template.inventory
         WFJT_limit = template.limit
         WFJT_scm_branch = template.scm_branch
+
         super(WorkflowJobLaunchSerializer, self).validate(attrs)
         template.extra_vars = WFJT_extra_vars
         template.inventory = WFJT_inventory
@@ -4551,7 +4632,6 @@ class NotificationTemplateSerializer(BaseSerializer):
-

 class NotificationSerializer(BaseSerializer):

     body = serializers.SerializerMethodField(help_text=_('Notification body'))

     class Meta:
@@ -4720,6 +4800,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
         if isinstance(obj.unified_job_template, SystemJobTemplate):
             summary_fields['unified_job_template']['job_type'] = obj.unified_job_template.job_type

+        # We are not showing instance groups on summary fields because JTs don't either
+
         if 'inventory' in summary_fields:
             return summary_fields

@@ -4754,7 +4836,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
 class InstanceLinkSerializer(BaseSerializer):
     class Meta:
         model = InstanceLink
-        fields = ('source', 'target')
+        fields = ('source', 'target', 'link_state')

     source = serializers.SlugRelatedField(slug_field="hostname", read_only=True)
     target = serializers.SlugRelatedField(slug_field="hostname", read_only=True)
@@ -4763,63 +4845,93 @@ class InstanceLinkSerializer(BaseSerializer):
 class InstanceNodeSerializer(BaseSerializer):
     class Meta:
         model = Instance
-        fields = ('id', 'hostname', 'node_type', 'node_state')
+        fields = ('id', 'hostname', 'node_type', 'node_state', 'enabled')

-    node_state = serializers.SerializerMethodField()
-
-    def get_node_state(self, obj):
-        if not obj.enabled:
-            return "disabled"
-        return "error" if obj.errors else "healthy"


 class InstanceSerializer(BaseSerializer):
+    show_capabilities = ['edit']
+
     consumed_capacity = serializers.SerializerMethodField()
     percent_capacity_remaining = serializers.SerializerMethodField()
-    jobs_running = serializers.IntegerField(help_text=_('Count of jobs in the running or waiting state that ' 'are targeted for this instance'), read_only=True)
+    jobs_running = serializers.IntegerField(help_text=_('Count of jobs in the running or waiting state that are targeted for this instance'), read_only=True)
     jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance'), read_only=True)
+    health_check_pending = serializers.SerializerMethodField()

     class Meta:
         model = Instance
-        read_only_fields = ('uuid', 'hostname', 'version', 'node_type')
+        read_only_fields = ('ip_address', 'uuid', 'version')
         fields = (
-            "id",
-            "type",
-            "url",
-            "related",
-            "uuid",
-            "hostname",
-            "created",
-            "modified",
-            "last_seen",
-            "last_health_check",
-            "errors",
+            'id',
+            'hostname',
+            'type',
+            'url',
+            'related',
+            'summary_fields',
+            'uuid',
+            'created',
+            'modified',
+            'last_seen',
+            'health_check_started',
+            'health_check_pending',
+            'last_health_check',
+            'errors',
             'capacity_adjustment',
-            "version",
-            "capacity",
-            "consumed_capacity",
-            "percent_capacity_remaining",
-            "jobs_running",
-            "jobs_total",
-            "cpu",
-            "memory",
-            "cpu_capacity",
-            "mem_capacity",
-            "enabled",
-            "managed_by_policy",
-            "node_type",
+            'version',
+            'capacity',
+            'consumed_capacity',
+            'percent_capacity_remaining',
+            'jobs_running',
+            'jobs_total',
+            'cpu',
+            'memory',
+            'cpu_capacity',
+            'mem_capacity',
+            'enabled',
+            'managed_by_policy',
+            'node_type',
+            'node_state',
+            'ip_address',
+            'listener_port',
         )
+        extra_kwargs = {
+            'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
+            'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED},
+            'hostname': {
+                'validators': [
+                    MaxLengthValidator(limit_value=250),
+                    validators.UniqueValidator(queryset=Instance.objects.all()),
+                    RegexValidator(
+                        regex=r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$',
+                        flags=re.IGNORECASE,
+                        inverse_match=True,
+                        message="hostname cannot be localhost or 127.0.0.1",
+                    ),
+                    HostnameRegexValidator(),
+                ],
+            },
+        }

     def get_related(self, obj):
         res = super(InstanceSerializer, self).get_related(obj)
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
+        if settings.IS_K8S and obj.node_type in (Instance.Types.EXECUTION,):
+            res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
+        res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
-            if obj.node_type != 'hop':
+            if obj.node_type == 'execution':
                 res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
         return res

+    def get_summary_fields(self, obj):
+        summary = super().get_summary_fields(obj)
+
+        # use this handle to distinguish between a listView and a detailView
+        if self.is_detail_view:
+            summary['links'] = InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source').filter(source=obj), many=True).data
+
+        return summary
+
     def get_consumed_capacity(self, obj):
         return obj.consumed_capacity

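The inverse-match RegexValidator added to extra_kwargs rejects the loopback spellings (localhost, 127.x.x.x, ::1) while still allowing hostnames that merely contain "localhost". A quick sketch of how the pattern behaves:

    import re

    LOOPBACK = re.compile(r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$', re.IGNORECASE)

    for hostname in ('localhost', '127.0.0.1', '::1', 'localhost.example.org', 'exec-node-1'):
        # inverse_match=True in the serializer means a *match* here is a validation error
        verdict = 'rejected' if LOOPBACK.match(hostname) else 'accepted'
        print(hostname, '->', verdict)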
@@ -4829,10 +4941,58 @@ class InstanceSerializer(BaseSerializer):
         else:
             return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100))

-    def validate(self, attrs):
-        if self.instance.node_type == 'hop':
-            raise serializers.ValidationError(_('Hop node instances may not be changed.'))
-        return attrs
+    def get_health_check_pending(self, obj):
+        return obj.health_check_pending
+
+    def validate(self, data):
+        if self.instance:
+            if self.instance.node_type == Instance.Types.HOP:
+                raise serializers.ValidationError("Hop node instances may not be changed.")
+        else:
+            if not settings.IS_K8S:
+                raise serializers.ValidationError("Can only create instances on Kubernetes or OpenShift.")
+        return data
+
+    def validate_node_type(self, value):
+        if not self.instance:
+            if value not in (Instance.Types.EXECUTION,):
+                raise serializers.ValidationError("Can only create execution nodes.")
+        else:
+            if self.instance.node_type != value:
+                raise serializers.ValidationError("Cannot change node type.")
+
+        return value
+
+    def validate_node_state(self, value):
+        if self.instance:
+            if value != self.instance.node_state:
+                if not settings.IS_K8S:
+                    raise serializers.ValidationError("Can only change the state on Kubernetes or OpenShift.")
+                if value != Instance.States.DEPROVISIONING:
+                    raise serializers.ValidationError("Can only change instances to the 'deprovisioning' state.")
+                if self.instance.node_type not in (Instance.Types.EXECUTION,):
+                    raise serializers.ValidationError("Can only deprovision execution nodes.")
+        else:
+            if value and value != Instance.States.INSTALLED:
+                raise serializers.ValidationError("Can only create instances in the 'installed' state.")
+
+        return value
+
+    def validate_hostname(self, value):
+        """
+        - Hostname cannot be "localhost" - but can be something like localhost.domain
+        - Cannot change the hostname of an already-instantiated & initialized Instance object
+        """
+        if self.instance and self.instance.hostname != value:
+            raise serializers.ValidationError("Cannot change hostname.")
+
+        return value
+
+    def validate_listener_port(self, value):
+        if self.instance and self.instance.listener_port != value:
+            raise serializers.ValidationError("Cannot change listener port.")
+
+        return value


 class InstanceHealthCheckSerializer(BaseSerializer):
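Taken together, the validators permit creating only execution-type instances, only in the 'installed' state, and only on Kubernetes/OpenShift deployments, with hostname and listener port immutable afterwards. A hedged sketch of the provisioning call (host and credentials hypothetical):

    import requests

    resp = requests.post(
        'https://awx.example.org/api/v2/instances/',
        json={'hostname': 'exec1.example.org', 'node_type': 'execution', 'node_state': 'installed'},
        auth=('admin', 'secret'),
    )
    # Expect a 400 with a serializer error unless the deployment reports settings.IS_K8S
    print(resp.status_code, resp.json())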
@@ -4843,14 +5003,11 @@ class InstanceHealthCheckSerializer(BaseSerializer):


 class InstanceGroupSerializer(BaseSerializer):

     show_capabilities = ['edit', 'delete']
+    capacity = serializers.SerializerMethodField()
     consumed_capacity = serializers.SerializerMethodField()
     percent_capacity_remaining = serializers.SerializerMethodField()
-    jobs_running = serializers.IntegerField(
-        help_text=_('Count of jobs in the running or waiting state that ' 'are targeted for this instance group'), read_only=True
-    )
+    jobs_running = serializers.SerializerMethodField()
     jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance group'), read_only=True)
     instances = serializers.SerializerMethodField()
     is_container_group = serializers.BooleanField(
@@ -4876,6 +5033,22 @@ class InstanceGroupSerializer(BaseSerializer):
         label=_('Policy Instance Minimum'),
         help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
     )
+    max_concurrent_jobs = serializers.IntegerField(
+        default=0,
+        min_value=0,
+        required=False,
+        initial=0,
+        label=_('Max Concurrent Jobs'),
+        help_text=_("Maximum number of concurrent jobs to run on a group. When set to zero, no maximum is enforced."),
+    )
+    max_forks = serializers.IntegerField(
+        default=0,
+        min_value=0,
+        required=False,
+        initial=0,
+        label=_('Max Forks'),
+        help_text=_("Maximum number of forks to execute concurrently on a group. When set to zero, no maximum is enforced."),
+    )
     policy_instance_list = serializers.ListField(
         child=serializers.CharField(),
         required=False,
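A hedged example of applying the new throttles to an existing instance group (zero leaves the cap unenforced; the group pk is hypothetical):

    import requests

    resp = requests.patch(
        'https://awx.example.org/api/v2/instance_groups/2/',
        json={'max_concurrent_jobs': 10, 'max_forks': 50},
        auth=('admin', 'secret'),
    )
    print(resp.json().get('max_concurrent_jobs'), resp.json().get('max_forks'))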
@@ -4897,6 +5070,8 @@ class InstanceGroupSerializer(BaseSerializer):
             "consumed_capacity",
             "percent_capacity_remaining",
             "jobs_running",
+            "max_concurrent_jobs",
+            "max_forks",
             "jobs_total",
             "instances",
             "is_container_group",
@@ -4978,38 +5153,47 @@ class InstanceGroupSerializer(BaseSerializer):
         # Store capacity values (globally computed) in the context
         if 'task_manager_igs' not in self.context:
             instance_groups_queryset = None
-            jobs_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
             if self.parent:  # Is ListView:
                 instance_groups_queryset = self.parent.instance

-            instances = TaskManagerInstances(jobs_qs)
-            instance_groups = TaskManagerInstanceGroups(instances_by_hostname=instances, instance_groups_queryset=instance_groups_queryset)
-            self.context['task_manager_igs'] = instance_groups
+            tm_models = TaskManagerModels.init_with_consumed_capacity(
+                instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'],
+                instance_groups_queryset=instance_groups_queryset,
+            )
+
+            self.context['task_manager_igs'] = tm_models.instance_groups
         return self.context['task_manager_igs']

     def get_consumed_capacity(self, obj):
         ig_mgr = self.get_ig_mgr()
         return ig_mgr.get_consumed_capacity(obj.name)

-    def get_percent_capacity_remaining(self, obj):
-        if not obj.capacity:
-            return 0.0
+    def get_capacity(self, obj):
         ig_mgr = self.get_ig_mgr()
-        return float("{0:.2f}".format((float(ig_mgr.get_remaining_capacity(obj.name)) / (float(obj.capacity))) * 100))
+        return ig_mgr.get_capacity(obj.name)
+
+    def get_percent_capacity_remaining(self, obj):
+        capacity = self.get_capacity(obj)
+        if not capacity:
+            return 0.0
+        consumed_capacity = self.get_consumed_capacity(obj)
+        return float("{0:.2f}".format(((float(capacity) - float(consumed_capacity)) / (float(capacity))) * 100))

     def get_instances(self, obj):
-        return obj.instances.count()
+        ig_mgr = self.get_ig_mgr()
+        return len(ig_mgr.get_instances(obj.name))
+
+    def get_jobs_running(self, obj):
+        ig_mgr = self.get_ig_mgr()
+        return ig_mgr.get_jobs_running(obj.name)


 class ActivityStreamSerializer(BaseSerializer):

     changes = serializers.SerializerMethodField()
     object_association = serializers.SerializerMethodField(help_text=_("When present, shows the field name of the role or relationship that changed."))
     object_type = serializers.SerializerMethodField(help_text=_("When present, shows the model on which the role or relationship was defined."))

-    @cached_property
-    def _local_summarizable_fk_fields(self):
+    def _local_summarizable_fk_fields(self, obj):
         summary_dict = copy.copy(SUMMARIZABLE_FK_FIELDS)
         # Special requests
         summary_dict['group'] = summary_dict['group'] + ('inventory_id',)
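The percentage math is unchanged; only its inputs now come from the task-manager snapshot instead of the model fields. A worked example of the formula:

    capacity = 120.0
    consumed_capacity = 30.0
    percent_remaining = float("{0:.2f}".format(((capacity - consumed_capacity) / capacity) * 100))
    print(percent_remaining)  # 75.0, i.e. three quarters of the group's capacity is free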
@@ -5029,7 +5213,13 @@ class ActivityStreamSerializer(BaseSerializer):
             ('workflow_approval', ('id', 'name', 'unified_job_id')),
             ('instance', ('id', 'hostname')),
         ]
-        return field_list
+        # Optimization - do not attempt to summarize all fields, pare down to only relations that exist
+        if not obj:
+            return field_list
+        existing_association_types = [obj.object1, obj.object2]
+        if 'user' in existing_association_types:
+            existing_association_types.append('role')
+        return [entry for entry in field_list if entry[0] in existing_association_types]

     class Meta:
         model = ActivityStream
@@ -5113,7 +5303,7 @@ class ActivityStreamSerializer(BaseSerializer):
         data = {}
         if obj.actor is not None:
             data['actor'] = self.reverse('api:user_detail', kwargs={'pk': obj.actor.pk})
-        for fk, __ in self._local_summarizable_fk_fields:
+        for fk, __ in self._local_summarizable_fk_fields(obj):
             if not hasattr(obj, fk):
                 continue
             m2m_list = self._get_related_objects(obj, fk)
@@ -5170,7 +5360,7 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_summary_fields(self, obj):
         summary_fields = OrderedDict()
-        for fk, related_fields in self._local_summarizable_fk_fields:
+        for fk, related_fields in self._local_summarizable_fk_fields(obj):
             try:
                 if not hasattr(obj, fk):
                     continue
awx/api/templates/api/job_template_launch.md
@@ -1,5 +1,5 @@
 Launch a Job Template:
-
+{% ifmeth GET %}
 Make a GET request to this resource to determine if the job_template can be
 launched and whether any passwords are required to launch the job_template.
 The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
 * `inventory_needed_to_start`: Flag indicating the presence of an inventory
   associated with the job template. If not then one should be supplied when
   launching the job (boolean, read-only)
-
-Make a POST request to this resource to launch the job_template. If any
+{% endifmeth %}
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
 passwords, inventory, or extra variables (extra_vars) are required, they must
 be passed via POST data, with extra_vars given as a YAML or JSON string and
 escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
 launched, a 405 status code will be returned. If the provided credential or
 inventory are not allowed to be used by the user, then a 403 status code will
 be returned.
+{% endifmeth %}
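With the method-aware {% ifmeth %} blocks, GET help text no longer advertises POST-only behavior and vice versa. A hedged sketch of the documented two-step flow (template id, host, and credentials hypothetical):

    import requests

    url = 'https://awx.example.org/api/v2/job_templates/42/launch/'
    auth = ('admin', 'secret')

    info = requests.get(url, auth=auth).json()  # e.g. variables_needed_to_start, passwords_needed_to_start
    if not info.get('passwords_needed_to_start'):
        job = requests.post(url, json={'extra_vars': {}}, auth=auth).json()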
awx/api/templates/instance_install_bundle/group_vars/all.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
|
|||||||
|
receptor_user: awx
|
||||||
|
receptor_group: awx
|
||||||
|
receptor_verify: true
|
||||||
|
receptor_tls: true
|
||||||
|
receptor_work_commands:
|
||||||
|
ansible-runner:
|
||||||
|
command: ansible-runner
|
||||||
|
params: worker
|
||||||
|
allowruntimeparams: true
|
||||||
|
verifysignature: true
|
||||||
|
custom_worksign_public_keyfile: receptor/work-public-key.pem
|
||||||
|
custom_tls_certfile: receptor/tls/receptor.crt
|
||||||
|
custom_tls_keyfile: receptor/tls/receptor.key
|
||||||
|
custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
|
||||||
|
receptor_protocol: 'tcp'
|
||||||
|
receptor_listener: true
|
||||||
|
receptor_port: {{ instance.listener_port }}
|
||||||
|
receptor_dependencies:
|
||||||
|
- python39-pip
|
||||||
|
{% verbatim %}
|
||||||
|
podman_user: "{{ receptor_user }}"
|
||||||
|
podman_group: "{{ receptor_group }}"
|
||||||
|
{% endverbatim %}
|
||||||
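Aside (not part of the diff): the file above is a Django template rendered server-side when the bundle is generated, so `{{ instance.listener_port }}` is substituted at generation time, while the `{% verbatim %}` block shields `{{ receptor_user }}` and `{{ receptor_group }}` so they survive as Ansible-time expressions.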
awx/api/templates/instance_install_bundle/install_receptor.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
+{% verbatim %}
+---
+- hosts: all
+  become: yes
+  tasks:
+    - name: Create the receptor user
+      user:
+        name: "{{ receptor_user }}"
+        shell: /bin/bash
+    - name: Enable Copr repo for Receptor
+      command: dnf copr enable ansible-awx/receptor -y
+    - import_role:
+        name: ansible.receptor.podman
+    - import_role:
+        name: ansible.receptor.setup
+    - name: Install ansible-runner
+      pip:
+        name: ansible-runner
+        executable: pip3.9
+{% endverbatim %}
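Aside (not part of the diff): after unpacking a generated bundle, the expected invocation is along the lines of `ansible-galaxy collection install -r requirements.yml` followed by `ansible-playbook -i inventory.yml install_receptor.yml`, once `ansible_user` has been filled in in inventory.yml (next file).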
awx/api/templates/instance_install_bundle/inventory.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
+---
+all:
+  hosts:
+    remote-execution:
+      ansible_host: {{ instance.hostname }}
+      ansible_user: <username> # user provided
+      ansible_ssh_private_key_file: ~/.ssh/id_rsa
awx/api/templates/instance_install_bundle/requirements.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
+---
+collections:
+  - name: ansible.receptor
+    version: 1.1.0
awx/api/urls/debug.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+from django.urls import re_path
+
+from awx.api.views.debug import (
+    DebugRootView,
+    TaskManagerDebugView,
+    DependencyManagerDebugView,
+    WorkflowManagerDebugView,
+)
+
+urls = [
+    re_path(r'^$', DebugRootView.as_view(), name='debug'),
+    re_path(r'^task_manager/$', TaskManagerDebugView.as_view(), name='task_manager'),
+    re_path(r'^dependency_manager/$', DependencyManagerDebugView.as_view(), name='dependency_manager'),
+    re_path(r'^workflow_manager/$', WorkflowManagerDebugView.as_view(), name='workflow_manager'),
+]
+
+__all__ = ['urls']
@@ -3,7 +3,15 @@
 
 from django.urls import re_path
 
-from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck
+from awx.api.views import (
+    InstanceList,
+    InstanceDetail,
+    InstanceUnifiedJobsList,
+    InstanceInstanceGroupsList,
+    InstanceHealthCheck,
+    InstancePeersList,
+)
+from awx.api.views.instance_install_bundle import InstanceInstallBundle
 
 
 urls = [
@@ -12,6 +20,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
+    re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'),
+    re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'),
 ]
 
 __all__ = ['urls']
@@ -3,26 +3,28 @@
 
 from django.urls import re_path
 
-from awx.api.views import (
+from awx.api.views.inventory import (
     InventoryList,
     InventoryDetail,
-    InventoryHostsList,
-    InventoryGroupsList,
-    InventoryRootGroupsList,
-    InventoryVariableData,
-    InventoryScriptView,
-    InventoryTreeView,
-    InventoryInventorySourcesList,
-    InventoryInventorySourcesUpdate,
     InventoryActivityStreamList,
     InventoryJobTemplateList,
-    InventoryAdHocCommandsList,
     InventoryAccessList,
     InventoryObjectRolesList,
     InventoryInstanceGroupsList,
     InventoryLabelList,
     InventoryCopy,
 )
+from awx.api.views import (
+    InventoryHostsList,
+    InventoryGroupsList,
+    InventoryInventorySourcesList,
+    InventoryInventorySourcesUpdate,
+    InventoryAdHocCommandsList,
+    InventoryRootGroupsList,
+    InventoryScriptView,
+    InventoryTreeView,
+    InventoryVariableData,
+)
 
 
 urls = [
@@ -3,6 +3,9 @@
 
 from django.urls import re_path
 
+from awx.api.views.inventory import (
+    InventoryUpdateEventsList,
+)
 from awx.api.views import (
     InventoryUpdateList,
     InventoryUpdateDetail,
@@ -10,7 +13,6 @@ from awx.api.views import (
     InventoryUpdateStdout,
     InventoryUpdateNotificationsList,
     InventoryUpdateCredentialsList,
-    InventoryUpdateEventsList,
 )
 
 
@@ -3,7 +3,7 @@
 
 from django.urls import re_path
 
-from awx.api.views import LabelList, LabelDetail
+from awx.api.views.labels import LabelList, LabelDetail
 
 
 urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
@@ -10,7 +10,7 @@ from oauthlib import oauth2
 from oauth2_provider import views
 
 from awx.main.models import RefreshToken
-from awx.api.views import ApiOAuthAuthorizationRootView
+from awx.api.views.root import ApiOAuthAuthorizationRootView
 
 
 class TokenView(views.TokenView):
@@ -3,7 +3,7 @@
 
 from django.urls import re_path
 
-from awx.api.views import (
+from awx.api.views.organization import (
     OrganizationList,
     OrganizationDetail,
     OrganizationUsersList,
@@ -14,7 +14,6 @@ from awx.api.views import (
     OrganizationJobTemplatesList,
     OrganizationWorkflowJobTemplatesList,
     OrganizationTeamsList,
-    OrganizationCredentialList,
     OrganizationActivityStreamList,
     OrganizationNotificationTemplatesList,
     OrganizationNotificationTemplatesErrorList,
@@ -25,8 +24,8 @@ from awx.api.views import (
     OrganizationGalaxyCredentialsList,
     OrganizationObjectRolesList,
     OrganizationAccessList,
-    OrganizationApplicationList,
 )
+from awx.api.views import OrganizationCredentialList, OrganizationApplicationList
 
 
 urls = [
@@ -3,7 +3,7 @@
 
 from django.urls import re_path
 
-from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList
+from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList
 
 
 urls = [
@@ -11,6 +11,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', ScheduleLabelsList.as_view(), name='schedule_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', ScheduleInstanceGroupList.as_view(), name='schedule_instance_groups_list'),
 ]
 
 __all__ = ['urls']
@@ -2,17 +2,19 @@
 # All Rights Reserved.
 
 from __future__ import absolute_import, unicode_literals
-from django.conf import settings
 from django.urls import include, re_path
 
+from awx import MODE
 from awx.api.generics import LoggedLoginView, LoggedLogoutView
-from awx.api.views import (
+from awx.api.views.root import (
     ApiRootView,
     ApiV2RootView,
     ApiV2PingView,
     ApiV2ConfigView,
     ApiV2SubscriptionView,
     ApiV2AttachView,
+)
+from awx.api.views import (
     AuthView,
     UserMeList,
     DashboardView,
@@ -28,8 +30,8 @@ from awx.api.views import (
     OAuth2TokenList,
     ApplicationOAuth2TokenList,
     OAuth2ApplicationDetail,
-    MeshVisualizer,
 )
+from awx.api.views.mesh_visualizer import MeshVisualizer
 
 from awx.api.views.metrics import MetricsView
 
@@ -145,7 +147,12 @@ urlpatterns = [
     re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
     re_path(r'^o/', include(oauth2_root_urls)),
 ]
-if settings.SETTINGS_MODULE == 'awx.settings.development':
+if MODE == 'development':
+    # Only include these if we are in the development environment
     from awx.api.swagger import SwaggerSchemaView
 
     urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]
+
+    from awx.api.urls.debug import urls as debug_urls
+
+    urlpatterns += [re_path(r'^debug/', include(debug_urls))]
@@ -1,6 +1,6 @@
 from django.urls import re_path
 
-from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
 
 
 urlpatterns = [
@@ -10,6 +10,8 @@ from awx.api.views import (
     WorkflowJobNodeFailureNodesList,
     WorkflowJobNodeAlwaysNodesList,
     WorkflowJobNodeCredentialsList,
+    WorkflowJobNodeLabelsList,
+    WorkflowJobNodeInstanceGroupsList,
 )
 
 
@@ -20,6 +22,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobNodeLabelsList.as_view(), name='workflow_job_node_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobNodeInstanceGroupsList.as_view(), name='workflow_job_node_instance_groups_list'),
 ]
 
 __all__ = ['urls']
@@ -11,6 +11,8 @@ from awx.api.views import (
     WorkflowJobTemplateNodeAlwaysNodesList,
     WorkflowJobTemplateNodeCredentialsList,
     WorkflowJobTemplateNodeCreateApproval,
+    WorkflowJobTemplateNodeLabelsList,
+    WorkflowJobTemplateNodeInstanceGroupsList,
 )
 
 
@@ -21,6 +23,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'),
     re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'),
+    re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateNodeLabelsList.as_view(), name='workflow_job_template_node_labels_list'),
+    re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobTemplateNodeInstanceGroupsList.as_view(), name='workflow_job_template_node_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'),
 ]
 
awx/api/validators.py (new file, 54 lines)
@@ -0,0 +1,54 @@
+import re
+
+from django.core.validators import RegexValidator, validate_ipv46_address
+from django.core.exceptions import ValidationError
+
+
+class HostnameRegexValidator(RegexValidator):
+    """
+    Fully validates a domain name that is compliant with norms in Linux/RHEL
+    - Cannot start with a hyphen
+    - Cannot begin with, or end with a "."
+    - Cannot contain any whitespace
+    - Entire hostname is max 255 chars (including dots)
+    - Each domain/label is between 1 and 63 characters, except the top level domain, which must be at least 2 characters
+    - Supports ipv4, ipv6, simple hostnames and FQDNs
+    - Follows RFC 9210 (modern RFC 1123, 1178) requirements
+
+    Accepts an IP address or hostname as the argument
+    """
+
+    regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
+    flags = re.IGNORECASE
+
+    def __call__(self, value):
+        regex_matches, err = self.__validate(value)
+        invalid_input = regex_matches if self.inverse_match else not regex_matches
+        if invalid_input:
+            if err is None:
+                err = ValidationError(self.message, code=self.code, params={"value": value})
+            raise err
+
+    def __str__(self):
+        return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"
+
+    def __validate(self, value):
+        if ' ' in value:
+            return False, ValidationError("whitespaces in hostnames are illegal")
+
+        """
+        If we have an IP address, try and validate it.
+        """
+        try:
+            validate_ipv46_address(value)
+            return True, None
+        except ValidationError:
+            pass
+
+        """
+        By this point in the code, we probably have a simple hostname, FQDN or a strange hostname like "192.localhost.domain.101"
+        """
+        if not self.regex.match(value):
+            return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")
+
+        return True, None
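Aside (not part of the diff): a minimal sketch of the validator in use, assuming a configured Django environment (e.g. an awx-manage shell):

    from django.core.exceptions import ValidationError

    from awx.api.validators import HostnameRegexValidator

    validate_hostname = HostnameRegexValidator()

    for candidate in ("node1.example.com", "10.0.0.42", "bad name.example.com"):
        try:
            validate_hostname(candidate)  # raises ValidationError on invalid input
            print(candidate, "ok")
        except ValidationError as err:
            print(candidate, "rejected:", err)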
(File diff suppressed because it is too large.)
awx/api/views/debug.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+from collections import OrderedDict
+
+from django.conf import settings
+
+from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
+from awx.api.generics import APIView
+
+from awx.main.scheduler import TaskManager, DependencyManager, WorkflowManager
+
+
+class TaskManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Task'
+
+    def get(self, request):
+        TaskManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class DependencyManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Dependency'
+
+    def get(self, request):
+        DependencyManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class WorkflowManagerDebugView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    prefix = 'Workflow'
+
+    def get(self, request):
+        WorkflowManager().schedule()
+        if not settings.AWX_DISABLE_TASK_MANAGERS:
+            msg = f"Running {self.prefix} manager. To disable other triggers to the {self.prefix} manager, set AWX_DISABLE_TASK_MANAGERS to True"
+        else:
+            msg = f"AWX_DISABLE_TASK_MANAGERS is True, this view is the only way to trigger the {self.prefix} manager"
+        return Response(msg)
+
+
+class DebugRootView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+
+    def get(self, request, format=None):
+        '''List of available debug urls'''
+        data = OrderedDict()
+        data['task_manager'] = '/api/debug/task_manager/'
+        data['dependency_manager'] = '/api/debug/dependency_manager/'
+        data['workflow_manager'] = '/api/debug/workflow_manager/'
+        return Response(data)
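Aside (not part of the diff): these views are only wired up when MODE == 'development' (see the urls change above). A hypothetical smoke test against a dev instance:

    import requests

    base = "https://awx.example.com/api/debug"  # hypothetical dev host

    for manager in ("task_manager", "dependency_manager", "workflow_manager"):
        r = requests.get(f"{base}/{manager}/")
        print(manager, r.status_code, r.text)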
awx/api/views/instance_install_bundle.py (new file, 199 lines)
@@ -0,0 +1,199 @@
+# Copyright (c) 2018 Red Hat, Inc.
+# All Rights Reserved.
+
+import datetime
+import io
+import ipaddress
+import os
+import tarfile
+
+import asn1
+from awx.api import serializers
+from awx.api.generics import GenericAPIView, Response
+from awx.api.permissions import IsSystemAdminOrAuditor
+from awx.main import models
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.x509 import DNSName, IPAddress, ObjectIdentifier, OtherName
+from cryptography.x509.oid import NameOID
+from django.http import HttpResponse
+from django.template.loader import render_to_string
+from django.utils.translation import gettext_lazy as _
+from rest_framework import status
+
+# Red Hat has an OID namespace (RHANANA). Receptor has its own designation under that.
+RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
+
+
+# generate install bundle for the instance
+# install bundle directory structure
+# ├── install_receptor.yml (playbook)
+# ├── inventory.yml
+# ├── group_vars
+# │   └── all.yml
+# ├── receptor
+# │   ├── tls
+# │   │   ├── ca
+# │   │   │   └── receptor-ca.crt
+# │   │   ├── receptor.crt
+# │   │   └── receptor.key
+# │   └── work-public-key.pem
+# └── requirements.yml
+class InstanceInstallBundle(GenericAPIView):
+    name = _('Install Bundle')
+    model = models.Instance
+    serializer_class = serializers.InstanceSerializer
+    permission_classes = (IsSystemAdminOrAuditor,)
+
+    def get(self, request, *args, **kwargs):
+        instance_obj = self.get_object()
+
+        if instance_obj.node_type not in ('execution',):
+            return Response(
+                data=dict(msg=_('Install bundle can only be generated for execution nodes.')),
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        with io.BytesIO() as f:
+            with tarfile.open(fileobj=f, mode='w:gz') as tar:
+                # copy /etc/receptor/tls/ca/receptor-ca.crt to receptor/tls/ca in the tar file
+                tar.add(
+                    os.path.realpath('/etc/receptor/tls/ca/receptor-ca.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/receptor-ca.crt"
+                )
+
+                # copy /etc/receptor/signing/work-public-key.pem to receptor/work-public-key.pem
+                tar.add('/etc/receptor/signing/work-public-key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work-public-key.pem")
+
+                # generate and write the receptor key to receptor/tls/receptor.key in the tar file
+                key, cert = generate_receptor_tls(instance_obj)
+
+                key_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.key")
+                key_tarinfo.size = len(key)
+                tar.addfile(key_tarinfo, io.BytesIO(key))
+
+                cert_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.crt")
+                cert_tarinfo.size = len(cert)
+                tar.addfile(cert_tarinfo, io.BytesIO(cert))
+
+                # generate and write install_receptor.yml to the tar file
+                playbook = generate_playbook().encode('utf-8')
+                playbook_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/install_receptor.yml")
+                playbook_tarinfo.size = len(playbook)
+                tar.addfile(playbook_tarinfo, io.BytesIO(playbook))
+
+                # generate and write inventory.yml to the tar file
+                inventory_yml = generate_inventory_yml(instance_obj).encode('utf-8')
+                inventory_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/inventory.yml")
+                inventory_yml_tarinfo.size = len(inventory_yml)
+                tar.addfile(inventory_yml_tarinfo, io.BytesIO(inventory_yml))
+
+                # generate and write group_vars/all.yml to the tar file
+                group_vars = generate_group_vars_all_yml(instance_obj).encode('utf-8')
+                group_vars_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/group_vars/all.yml")
+                group_vars_tarinfo.size = len(group_vars)
+                tar.addfile(group_vars_tarinfo, io.BytesIO(group_vars))
+
+                # generate and write requirements.yml to the tar file
+                requirements_yml = generate_requirements_yml().encode('utf-8')
+                requirements_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/requirements.yml")
+                requirements_yml_tarinfo.size = len(requirements_yml)
+                tar.addfile(requirements_yml_tarinfo, io.BytesIO(requirements_yml))
+
+            # respond with the tarfile
+            f.seek(0)
+            response = HttpResponse(f.read(), status=status.HTTP_200_OK)
+            response['Content-Disposition'] = f"attachment; filename={instance_obj.hostname}_install_bundle.tar.gz"
+            return response
+
+
+def generate_playbook():
+    return render_to_string("instance_install_bundle/install_receptor.yml")
+
+
+def generate_requirements_yml():
+    return render_to_string("instance_install_bundle/requirements.yml")
+
+
+def generate_inventory_yml(instance_obj):
+    return render_to_string("instance_install_bundle/inventory.yml", context=dict(instance=instance_obj))
+
+
+def generate_group_vars_all_yml(instance_obj):
+    return render_to_string("instance_install_bundle/group_vars/all.yml", context=dict(instance=instance_obj))
+
+
+def generate_receptor_tls(instance_obj):
+    # generate private key for the receptor
+    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+
+    # encode receptor hostname to asn1
+    hostname = instance_obj.hostname
+    encoder = asn1.Encoder()
+    encoder.start()
+    encoder.write(hostname.encode(), nr=asn1.Numbers.UTF8String)
+    hostname_asn1 = encoder.output()
+
+    san_params = [
+        DNSName(hostname),
+        OtherName(ObjectIdentifier(RECEPTOR_OID), hostname_asn1),
+    ]
+
+    try:
+        san_params.append(IPAddress(ipaddress.IPv4Address(hostname)))
+    except ipaddress.AddressValueError:
+        pass
+
+    # generate certificate for the receptor
+    csr = (
+        x509.CertificateSigningRequestBuilder()
+        .subject_name(
+            x509.Name(
+                [
+                    x509.NameAttribute(NameOID.COMMON_NAME, hostname),
+                ]
+            )
+        )
+        .add_extension(
+            x509.SubjectAlternativeName(san_params),
+            critical=False,
+        )
+        .sign(key, hashes.SHA256())
+    )
+
+    # sign csr with the receptor ca key from /etc/receptor/ca/receptor-ca.key
+    with open('/etc/receptor/tls/ca/receptor-ca.key', 'rb') as f:
+        ca_key = serialization.load_pem_private_key(
+            f.read(),
+            password=None,
+        )
+
+    with open('/etc/receptor/tls/ca/receptor-ca.crt', 'rb') as f:
+        ca_cert = x509.load_pem_x509_certificate(f.read())
+
+    cert = (
+        x509.CertificateBuilder()
+        .subject_name(csr.subject)
+        .issuer_name(ca_cert.issuer)
+        .public_key(csr.public_key())
+        .serial_number(x509.random_serial_number())
+        .not_valid_before(datetime.datetime.utcnow())
+        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
+        .add_extension(
+            csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
+            critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
+        )
+        .sign(ca_key, hashes.SHA256())
+    )
+
+    key = key.private_bytes(
+        encoding=serialization.Encoding.PEM,
+        format=serialization.PrivateFormat.TraditionalOpenSSL,
+        encryption_algorithm=serialization.NoEncryption(),
+    )
+
+    cert = cert.public_bytes(
+        encoding=serialization.Encoding.PEM,
+    )
+
+    return key, cert
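Aside (not part of the diff): a quick way to sanity-check a downloaded bundle is to open the generated certificate and confirm the SAN carries the instance hostname; a sketch assuming the tarball has been extracted into the current directory:

    from cryptography import x509

    with open("receptor/tls/receptor.crt", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read())

    san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    print(san.get_values_for_type(x509.DNSName))  # should list the instance hostname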
@@ -18,8 +18,6 @@ from rest_framework import status
 # AWX
 from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
 
-from awx.main.models.label import Label
-
 from awx.api.generics import (
     ListCreateAPIView,
     RetrieveUpdateDestroyAPIView,
@@ -27,9 +25,8 @@ from awx.api.generics import (
     SubListAttachDetachAPIView,
     ResourceAccessList,
     CopyAPIView,
-    DeleteLastUnattachLabelMixin,
-    SubListCreateAttachDetachAPIView,
 )
+from awx.api.views.labels import LabelSubListCreateAttachDetachView
 
 
 from awx.api.serializers import (
@@ -39,7 +36,6 @@ from awx.api.serializers import (
     InstanceGroupSerializer,
     InventoryUpdateEventSerializer,
     JobTemplateSerializer,
-    LabelSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin
 
@@ -50,7 +46,6 @@ logger = logging.getLogger('awx.api.views.organization')
 
 
 class InventoryUpdateEventsList(SubListAPIView):
-
     model = InventoryUpdateEvent
     serializer_class = InventoryUpdateEventSerializer
     parent_model = InventoryUpdate
@@ -70,13 +65,11 @@ class InventoryUpdateEventsList(SubListAPIView):
 
 
 class InventoryList(ListCreateAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer
 
 
 class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer
 
@@ -102,7 +95,6 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
 
 
 class InventoryActivityStreamList(SubListAPIView):
-
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Inventory
@@ -117,7 +109,6 @@ class InventoryActivityStreamList(SubListAPIView):
 
 
 class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
-
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
     parent_model = Inventory
@@ -125,13 +116,11 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
 
 
 class InventoryAccessList(ResourceAccessList):
-
     model = User  # needs to be User for AccessLists's
     parent_model = Inventory
 
 
 class InventoryObjectRolesList(SubListAPIView):
-
     model = Role
     serializer_class = RoleSerializer
     parent_model = Inventory
@@ -144,7 +133,6 @@ class InventoryObjectRolesList(SubListAPIView):
 
 
 class InventoryJobTemplateList(SubListAPIView):
-
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Inventory
@@ -157,31 +145,10 @@ class InventoryJobTemplateList(SubListAPIView):
         return qs.filter(inventory=parent)
 
 
-class InventoryLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView, SubListAPIView):
-
-    model = Label
-    serializer_class = LabelSerializer
+class InventoryLabelList(LabelSubListCreateAttachDetachView):
     parent_model = Inventory
-    relationship = 'labels'
-
-    def post(self, request, *args, **kwargs):
-        # If a label already exists in the database, attach it instead of erroring out
-        # that it already exists
-        if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
-            existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
-            if existing.exists():
-                existing = existing[0]
-                request.data['id'] = existing.id
-                del request.data['name']
-                del request.data['organization']
-        if Label.objects.filter(inventory_labels=self.kwargs['pk']).count() > 100:
-            return Response(
-                dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
-            )
-        return super(InventoryLabelList, self).post(request, *args, **kwargs)
 
 
 class InventoryCopy(CopyAPIView):
-
     model = Inventory
     copy_return_serializer_class = InventorySerializer
awx/api/views/labels.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+# AWX
+from awx.api.generics import SubListCreateAttachDetachAPIView, RetrieveUpdateAPIView, ListCreateAPIView
+from awx.main.models import Label
+from awx.api.serializers import LabelSerializer
+
+# Django
+from django.utils.translation import gettext_lazy as _
+
+# Django REST Framework
+from rest_framework.response import Response
+from rest_framework.status import HTTP_400_BAD_REQUEST
+
+
+class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
+    """
+    For related labels lists like /api/v2/inventories/N/labels/
+
+    We want the last label instance to be deleted from the database
+    when the last disassociate happens.
+
+    Subclasses need to define parent_model
+    """
+
+    model = Label
+    serializer_class = LabelSerializer
+    relationship = 'labels'
+
+    def unattach(self, request, *args, **kwargs):
+        (sub_id, res) = super().unattach_validate(request)
+        if res:
+            return res
+
+        res = super().unattach_by_id(request, sub_id)
+
+        obj = self.model.objects.get(id=sub_id)
+
+        if obj.is_detached():
+            obj.delete()
+
+        return res
+
+    def post(self, request, *args, **kwargs):
+        # If a label already exists in the database, attach it instead of erroring out
+        # that it already exists
+        if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
+            existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
+            if existing.exists():
+                existing = existing[0]
+                request.data['id'] = existing.id
+                del request.data['name']
+                del request.data['organization']
+
+        # Give a 400 error if we have attached too many labels to this object
+        label_filter = self.parent_model._meta.get_field(self.relationship).remote_field.name
+        if Label.objects.filter(**{label_filter: self.kwargs['pk']}).count() > 100:
+            return Response(dict(msg=_(f'Maximum number of labels for {self.parent_model._meta.verbose_name_raw} reached.')), status=HTTP_400_BAD_REQUEST)
+
+        return super().post(request, *args, **kwargs)
+
+
+class LabelDetail(RetrieveUpdateAPIView):
+    model = Label
+    serializer_class = LabelSerializer
+
+
+class LabelList(ListCreateAPIView):
+    name = _("Labels")
+    model = Label
+    serializer_class = LabelSerializer
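Aside (not part of the diff): the attach-or-create behavior above, seen from the API side (hypothetical host, token, and ids):

    import requests

    host = "https://awx.example.com"
    headers = {"Authorization": "Bearer TOKEN"}

    # Posting a name/organization pair attaches the existing label of that
    # name instead of failing as a duplicate; past 100 labels this returns 400.
    r = requests.post(
        f"{host}/api/v2/inventories/1/labels/",
        headers=headers,
        json={"name": "prod", "organization": 1},
    )
    print(r.status_code)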
@@ -10,13 +10,11 @@ from awx.main.models import InstanceLink, Instance
 
 
 class MeshVisualizer(APIView):
-
     name = _("Mesh Visualizer")
     permission_classes = (IsSystemAdminOrAuditor,)
     swagger_topic = "System Configuration"
 
     def get(self, request, format=None):
-
         data = {
             'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
             'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
@@ -5,9 +5,11 @@
 import logging
 
 # Django
+from django.conf import settings
 from django.utils.translation import gettext_lazy as _
 
 # Django REST Framework
+from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
 from rest_framework.exceptions import PermissionDenied
 
@@ -25,15 +27,19 @@ logger = logging.getLogger('awx.analytics')
 
 
 class MetricsView(APIView):
-
     name = _('Metrics')
     swagger_topic = 'Metrics'
 
     renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
 
+    def initialize_request(self, request, *args, **kwargs):
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
+            self.permission_classes = (AllowAny,)
+        return super(APIView, self).initialize_request(request, *args, **kwargs)
+
     def get(self, request):
         '''Show Metrics Details'''
-        if request.user.is_superuser or request.user.is_system_auditor:
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
             metrics_to_show = ''
             if not request.query_params.get('subsystemonly', "0") == "1":
                 metrics_to_show += metrics().decode('UTF-8')
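Aside (not part of the diff): with ALLOW_METRICS_FOR_ANONYMOUS_USERS enabled, the endpoint can be scraped without credentials; a sketch with a hypothetical host:

    import requests

    # No auth header; works only when ALLOW_METRICS_FOR_ANONYMOUS_USERS is True.
    r = requests.get("https://awx.example.com/api/v2/metrics/?subsystemonly=1")
    print(r.status_code, r.text[:200])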
@@ -16,7 +16,7 @@ from rest_framework import status
 
 from awx.main.constants import ACTIVE_STATES
 from awx.main.utils import get_object_or_400
-from awx.main.models.ha import Instance, InstanceGroup
+from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
 from awx.main.models.organization import Team
 from awx.main.models.projects import Project
 from awx.main.models.inventory import Inventory
@@ -107,6 +107,11 @@ class InstanceGroupMembershipMixin(object):
         if inst_name in ig_obj.policy_instance_list:
             ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
             ig_obj.save(update_fields=['policy_instance_list'])
+
+        # sometimes removing an instance has a non-obvious consequence
+        # this is almost always true if policy_instance_percentage or _minimum is non-zero
+        # after removing a single instance, the other memberships need to be re-balanced
+        schedule_policy_task()
         return response
 
 
@@ -58,7 +58,6 @@ logger = logging.getLogger('awx.api.views.organization')
 
 
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
-
     model = Organization
     serializer_class = OrganizationSerializer
 
@@ -70,7 +69,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
 
 
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
-
     model = Organization
     serializer_class = OrganizationSerializer
 
@@ -106,7 +104,6 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
 
 
 class OrganizationInventoriesList(SubListAPIView):
-
     model = Inventory
     serializer_class = InventorySerializer
     parent_model = Organization
@@ -114,7 +111,6 @@ class OrganizationInventoriesList(SubListAPIView):
 
 
 class OrganizationUsersList(BaseUsersList):
-
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -123,7 +119,6 @@ class OrganizationUsersList(BaseUsersList):
 
 
 class OrganizationAdminsList(BaseUsersList):
-
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -132,7 +127,6 @@ class OrganizationAdminsList(BaseUsersList):
 
 
 class OrganizationProjectsList(SubListCreateAPIView):
-
     model = Project
     serializer_class = ProjectSerializer
     parent_model = Organization
@@ -140,7 +134,6 @@ class OrganizationProjectsList(SubListCreateAPIView):
 
 
 class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
-
     model = ExecutionEnvironment
     serializer_class = ExecutionEnvironmentSerializer
     parent_model = Organization
@@ -150,7 +143,6 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationJobTemplatesList(SubListCreateAPIView):
-
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Organization
@@ -158,7 +150,6 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):
 
 
 class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
-
     model = WorkflowJobTemplate
     serializer_class = WorkflowJobTemplateSerializer
     parent_model = Organization
@@ -166,7 +157,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
 
 
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
-
     model = Team
     serializer_class = TeamSerializer
     parent_model = Organization
@@ -175,7 +165,6 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationActivityStreamList(SubListAPIView):
-
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Organization
@@ -184,7 +173,6 @@ class OrganizationActivityStreamList(SubListAPIView):
 
 
 class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
-
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization
@@ -193,34 +181,28 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
-
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization
 
 
 class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_started'
 
 
 class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_error'
 
 
 class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_success'
 
 
 class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
-
     relationship = 'notification_templates_approvals'
 
 
 class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
-
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
     parent_model = Organization
@@ -228,7 +210,6 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
 
 
 class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
-
     model = Credential
     serializer_class = CredentialSerializer
     parent_model = Organization
@@ -240,13 +221,11 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
 
 
 class OrganizationAccessList(ResourceAccessList):
-
     model = User  # needs to be User for AccessLists's
     parent_model = Organization
 
 
 class OrganizationObjectRolesList(SubListAPIView):
-
     model = Role
     serializer_class = RoleSerializer
     parent_model = Organization
@@ -36,7 +36,6 @@ logger = logging.getLogger('awx.api.views.root')
|
|||||||
|
|
||||||
|
|
||||||
class ApiRootView(APIView):
|
class ApiRootView(APIView):
|
||||||
|
|
||||||
permission_classes = (AllowAny,)
|
permission_classes = (AllowAny,)
|
||||||
name = _('REST API')
|
name = _('REST API')
|
||||||
versioning_class = None
|
versioning_class = None
|
||||||
@@ -59,7 +58,6 @@ class ApiRootView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class ApiOAuthAuthorizationRootView(APIView):
|
class ApiOAuthAuthorizationRootView(APIView):
|
||||||
|
|
||||||
permission_classes = (AllowAny,)
|
permission_classes = (AllowAny,)
|
||||||
name = _("API OAuth 2 Authorization Root")
|
name = _("API OAuth 2 Authorization Root")
|
||||||
versioning_class = None
|
versioning_class = None
|
||||||
@@ -74,7 +72,6 @@ class ApiOAuthAuthorizationRootView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class ApiVersionRootView(APIView):
|
class ApiVersionRootView(APIView):
|
||||||
|
|
||||||
permission_classes = (AllowAny,)
|
permission_classes = (AllowAny,)
|
||||||
swagger_topic = 'Versioning'
|
swagger_topic = 'Versioning'
|
||||||
|
|
||||||
@@ -172,7 +169,6 @@ class ApiV2PingView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class ApiV2SubscriptionView(APIView):
|
class ApiV2SubscriptionView(APIView):
|
||||||
|
|
||||||
permission_classes = (IsAuthenticated,)
|
permission_classes = (IsAuthenticated,)
|
||||||
name = _('Subscriptions')
|
name = _('Subscriptions')
|
||||||
swagger_topic = 'System Configuration'
|
swagger_topic = 'System Configuration'
|
||||||
@@ -212,7 +208,6 @@ class ApiV2SubscriptionView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class ApiV2AttachView(APIView):
|
class ApiV2AttachView(APIView):
|
||||||
|
|
||||||
permission_classes = (IsAuthenticated,)
|
permission_classes = (IsAuthenticated,)
|
||||||
name = _('Attach Subscription')
|
name = _('Attach Subscription')
|
||||||
swagger_topic = 'System Configuration'
|
swagger_topic = 'System Configuration'
|
||||||
@@ -230,7 +225,6 @@ class ApiV2AttachView(APIView):
|
|||||||
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
|
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
|
||||||
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
||||||
if pool_id and user and pw:
|
if pool_id and user and pw:
|
||||||
|
|
||||||
data = request.data.copy()
|
data = request.data.copy()
|
||||||
try:
|
try:
|
||||||
with set_environ(**settings.AWX_TASK_ENV):
|
with set_environ(**settings.AWX_TASK_ENV):
|
||||||
@@ -258,7 +252,6 @@ class ApiV2AttachView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class ApiV2ConfigView(APIView):
|
class ApiV2ConfigView(APIView):
|
||||||
|
|
||||||
permission_classes = (IsAuthenticated,)
|
permission_classes = (IsAuthenticated,)
|
||||||
name = _('Configuration')
|
name = _('Configuration')
|
||||||
swagger_topic = 'System Configuration'
|
swagger_topic = 'System Configuration'
|
||||||
@@ -8,7 +8,6 @@ from django.utils.translation import gettext_lazy as _


 class ConfConfig(AppConfig):
-
     name = 'awx.conf'
     verbose_name = _('Configuration')

@@ -16,7 +15,6 @@ class ConfConfig(AppConfig):
         self.module.autodiscover()

         if not set(sys.argv) & {'migrate', 'check_migrations'}:
-
             from .settings import SettingsWrapper

             SettingsWrapper.initialize()
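Note: the sys.argv guard above keeps SettingsWrapper.initialize() from touching the database while `migrate` or `check_migrations` is running, when the settings table may not exist yet. The set-intersection test itself behaves like this (a minimal, runnable illustration; the argv value is made up):

    >>> import sys
    >>> sys.argv = ['manage.py', 'migrate']
    >>> bool(set(sys.argv) & {'migrate', 'check_migrations'})
    True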
@@ -47,7 +47,6 @@ class IntegerField(IntegerField):


 class StringListField(ListField):
-
     child = CharField()

     def to_representation(self, value):
@@ -57,7 +56,6 @@ class StringListField(ListField):


 class StringListBooleanField(ListField):
-
     default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
     child = CharField()

@@ -96,7 +94,6 @@ class StringListBooleanField(ListField):


 class StringListPathField(StringListField):
-
     default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}

     def to_internal_value(self, paths):
@@ -126,7 +123,6 @@ class StringListIsolatedPathField(StringListField):
     }

     def to_internal_value(self, paths):
-
         if isinstance(paths, (list, tuple)):
             for p in paths:
                 if not isinstance(p, str):
@@ -8,7 +8,6 @@ import awx.main.fields


 class Migration(migrations.Migration):
-
     dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

     operations = [
@@ -48,7 +48,6 @@ def revert_tower_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]

     run_before = [('main', '0005_squashed_v310_v313_updates')]
@@ -7,7 +7,6 @@ import awx.main.fields


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0002_v310_copy_tower_settings')]

     operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]
@@ -5,7 +5,6 @@ from django.db import migrations


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0003_v310_JSONField_changes')]

     operations = [
@@ -15,7 +15,6 @@ def reverse_copy_session_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0004_v320_reencrypt')]

     operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
@@ -8,7 +8,6 @@ from django.db import migrations


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0005_v330_rename_two_session_settings')]

     operations = [migrations.RunPython(fill_ldap_group_type_params)]
@@ -9,7 +9,6 @@ def copy_allowed_ips(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0006_v331_ldap_group_type')]

     operations = [migrations.RunPython(copy_allowed_ips)]
@@ -14,7 +14,6 @@ def _noop(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0007_v380_rename_more_settings')]

     operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
@@ -10,7 +10,6 @@ def rename_proot_settings(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [('conf', '0008_subscriptions')]

     operations = [migrations.RunPython(rename_proot_settings)]
@@ -10,7 +10,6 @@ __all__ = ['rename_setting']


 def rename_setting(apps, schema_editor, old_key, new_key):
-
     old_setting = None
     Setting = apps.get_model('conf', 'Setting')
     if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
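Note: rename_setting (its body is truncated above) bails out when the new key already exists as a database row or as a Python-level setting, so renames never clobber explicit configuration. A hedged sketch of how a conf data migration typically drives such a helper; the keys, helper body, and migration label here are illustrative, not taken from the diff:

    from django.db import migrations

    def _rename(apps, schema_editor):
        # hypothetical stand-in for the rename_setting helper shown above
        Setting = apps.get_model('conf', 'Setting')
        if not Setting.objects.filter(key='NEW_KEY').exists():
            Setting.objects.filter(key='OLD_KEY').update(key='NEW_KEY')

    class Migration(migrations.Migration):
        dependencies = [('conf', '0001_initial')]
        operations = [migrations.RunPython(_rename)]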
@@ -17,7 +17,6 @@ __all__ = ['Setting']


 class Setting(CreatedModifiedModel):
-
     key = models.CharField(max_length=255)
     value = JSONBlob(null=True)
     user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
@@ -80,7 +80,7 @@ def _ctit_db_wrapper(trans_safe=False):
         yield
     except DBError as exc:
         if trans_safe:
-            level = logger.exception
+            level = logger.warning
             if isinstance(exc, ProgrammingError):
                 if 'relation' in str(exc) and 'does not exist' in str(exc):
                     # this generally means we can't fetch Tower configuration
@@ -89,7 +89,7 @@ def _ctit_db_wrapper(trans_safe=False):
                     # has come up *before* the database has finished migrating, and
                     # especially that the conf.settings table doesn't exist yet
                     level = logger.debug
-            level('Database settings are not available, using defaults.')
+            level(f'Database settings are not available, using defaults. error: {str(exc)}')
         else:
             logger.exception('Error modifying something related to database settings.')
     finally:
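Note: the two hunks above change the degraded path of _ctit_db_wrapper in two ways: the fallback log level drops from logger.exception to logger.warning, and the message now carries the underlying error text. The select-a-callable-then-log-once pattern, reduced to a standalone sketch (DBError is simplified to Exception here; names are illustrative):

    import logging
    from contextlib import contextmanager

    logger = logging.getLogger('example')

    @contextmanager
    def db_wrapper(trans_safe=False):
        try:
            yield
        except Exception as exc:
            if trans_safe:
                # not a traceback-worthy event during startup or migrations
                level = logger.warning
                level(f'Database settings are not available, using defaults. error: {exc}')
            else:
                logger.exception('Error modifying something related to database settings.')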
@@ -104,7 +104,6 @@ def filter_sensitive(registry, key, value):


 class TransientSetting(object):
-
     __slots__ = ('pk', 'value')

     def __init__(self, pk, value):
@@ -5,7 +5,6 @@ from awx.conf.fields import StringListBooleanField, StringListPathField, ListTup


 class TestStringListBooleanField:
-
     FIELD_VALUES = [
         ("hello", "hello"),
         (("a", "b"), ["a", "b"]),
@@ -53,7 +52,6 @@ class TestStringListBooleanField:


 class TestListTuplesField:
-
     FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]

     FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
@@ -73,7 +71,6 @@ class TestListTuplesField:


 class TestStringListPathField:
-
     FIELD_VALUES = [
         ((".", "..", "/"), [".", "..", "/"]),
         (("/home",), ["/home"]),
@@ -36,7 +36,6 @@ SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'nam


 class SettingCategoryList(ListAPIView):
-
     model = Setting  # Not exactly, but needed for the view.
     serializer_class = SettingCategorySerializer
     filter_backends = []
@@ -58,7 +57,6 @@ class SettingCategoryList(ListAPIView):


 class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
-
     model = Setting  # Not exactly, but needed for the view.
     serializer_class = SettingSingletonSerializer
     filter_backends = []
@@ -146,7 +144,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):


 class SettingLoggingTest(GenericAPIView):
-
     name = _('Logging Connectivity Test')
     model = Setting
     serializer_class = SettingSingletonSerializer
@@ -6237,4 +6237,5 @@ msgstr "%s se está actualizando."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Esta página se actualizará cuando se complete."
+
@@ -721,7 +721,7 @@ msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DT
 #: awx/api/serializers.py:4657
 msgid ""
 "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ."
-msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."
+msgstr "DTSTART ne peut correspondre à une date-heure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."

 #: awx/api/serializers.py:4659
 msgid "Multiple DTSTART is not supported."
@@ -6239,4 +6239,5 @@ msgstr "%s est en cours de mise à niveau."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Cette page sera rafraîchie une fois terminée."
+
@@ -1440,7 +1440,7 @@ msgstr "指定した認証情報は無効 (HTTP 401) です。"

 #: awx/api/views/root.py:193 awx/api/views/root.py:234
 msgid "Unable to connect to proxy server."
-msgstr "プロキシサーバーに接続できません。"
+msgstr "プロキシーサーバーに接続できません。"

 #: awx/api/views/root.py:195 awx/api/views/root.py:236
 msgid "Could not connect to subscription service."
@@ -1976,7 +1976,7 @@ msgstr "リモートホスト名または IP を判別するために検索す

 #: awx/main/conf.py:85
 msgid "Proxy IP Allowed List"
-msgstr "プロキシ IP 許可リスト"
+msgstr "プロキシー IP 許可リスト"

 #: awx/main/conf.py:87
 msgid ""
@@ -2198,7 +2198,7 @@ msgid ""
 "Follow symbolic links when scanning for playbooks. Be aware that setting "
 "this to True can lead to infinite recursion if a link points to a parent "
 "directory of itself."
-msgstr "Playbook をスキャンするときは、シンボリックリンクをたどってください。リンクがそれ自体の親ディレクトリーを指している場合は、これを True に設定すると、無限再帰が発生する可能性があることに注意してください。"
+msgstr "Playbook のスキャン時にシンボリックリンクをたどります。リンクが親ディレクトリーを参照している場合には、この設定を True に指定すると無限再帰が発生する可能性があります。"

 #: awx/main/conf.py:337
 msgid "Ignore Ansible Galaxy SSL Certificate Verification"
@@ -2499,7 +2499,7 @@ msgstr "Insights for Ansible Automation Platform の最終収集日。"
 msgid ""
 "Last gathered entries for expensive collectors for Insights for Ansible "
 "Automation Platform."
-msgstr "Insights for Ansible Automation Platform の高価なコレクターの最後に収集されたエントリー。"
+msgstr "Insights for Ansible Automation Platform でコストがかかっているコレクターに関して最後に収集されたエントリー"

 #: awx/main/conf.py:686
 msgid "Insights for Ansible Automation Platform Gather Interval"
@@ -3692,7 +3692,7 @@ msgstr "タスクの開始"

 #: awx/main/models/events.py:189
 msgid "Variables Prompted"
-msgstr "変数のプロモート"
+msgstr "提示される変数"

 #: awx/main/models/events.py:190
 msgid "Gathering Facts"
@@ -3741,15 +3741,15 @@ msgstr "エラー"

 #: awx/main/models/execution_environments.py:17
 msgid "Always pull container before running."
-msgstr "実行前に必ずコンテナーをプルしてください。"
+msgstr "実行前に必ずコンテナーをプルする"

 #: awx/main/models/execution_environments.py:18
 msgid "Only pull the image if not present before running."
-msgstr "実行する前に、存在しない場合にのみイメージをプルしてください。"
+msgstr "イメージが存在しない場合のみ実行前にプルする"

 #: awx/main/models/execution_environments.py:19
 msgid "Never pull container before running."
-msgstr "実行前にコンテナーをプルしないでください。"
+msgstr "実行前にコンテナーをプルしない"

 #: awx/main/models/execution_environments.py:29
 msgid ""
@@ -5228,7 +5228,7 @@ msgid ""
 "SSL) or \"ldaps://ldap.example.com:636\" (SSL). Multiple LDAP servers may be "
 "specified by separating with spaces or commas. LDAP authentication is "
 "disabled if this parameter is empty."
-msgstr "\"ldap://ldap.example.com:389\" (非 SSL) または \"ldaps://ldap.example.com:636\" (SSL) などの LDAP サーバーに接続する URI です。複数の LDAP サーバーをスペースまたはカンマで区切って指定できます。LDAP 認証は、このパラメーターが空の場合は無効になります。"
+msgstr "\"ldap://ldap.example.com:389\" (非 SSL) または \"ldaps://ldap.example.com:636\" (SSL) などの LDAP サーバーに接続する URI です。複数の LDAP サーバーをスペースまたはコンマで区切って指定できます。LDAP 認証は、このパラメーターが空の場合は無効になります。"

 #: awx/sso/conf.py:170 awx/sso/conf.py:187 awx/sso/conf.py:198
 #: awx/sso/conf.py:209 awx/sso/conf.py:226 awx/sso/conf.py:244
@@ -6236,4 +6236,5 @@ msgstr "%s が現在アップグレード中です。"

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "このページは完了すると更新されます。"
+
@@ -956,7 +956,7 @@ msgstr "인스턴스 그룹의 인스턴스"

 #: awx/api/views/__init__.py:450
 msgid "Schedules"
-msgstr "일정"
+msgstr "스케줄"

 #: awx/api/views/__init__.py:464
 msgid "Schedule Recurrence Rule Preview"
@@ -3261,7 +3261,7 @@ msgstr "JSON 또는 YAML 구문을 사용하여 인젝터를 입력합니다.
 #: awx/main/models/credential/__init__.py:412
 #, python-format
 msgid "adding %s credential type"
-msgstr "인증 정보 유형 %s 추가 중"
+msgstr "인증 정보 유형 %s 추가 중"

 #: awx/main/models/credential/__init__.py:590
 #: awx/main/models/credential/__init__.py:672
@@ -6236,4 +6236,5 @@ msgstr "%s 현재 업그레이드 중입니다."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "완료되면 이 페이지가 새로 고침됩니다."
+
@@ -6237,4 +6237,5 @@ msgstr "Er wordt momenteel een upgrade van%s geïnstalleerd."

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "Deze pagina wordt vernieuwd als hij klaar is."
+
@@ -348,7 +348,7 @@ msgstr "SCM track_submodules 只能用于 git 项目。"
 msgid ""
 "Only Container Registry credentials can be associated with an Execution "
 "Environment"
-msgstr "只有容器 registry 凭证可以与执行环境关联"
+msgstr "只有容器注册表凭证才可以与执行环境关联"

 #: awx/api/serializers.py:1440
 msgid "Cannot change the organization of an execution environment"
@@ -629,7 +629,7 @@ msgstr "不支持在不替换的情况下在启动时删除 {} 凭证。提供

 #: awx/api/serializers.py:4338
 msgid "The inventory associated with this Workflow is being deleted."
-msgstr "与此 Workflow 关联的清单将被删除。"
+msgstr "与此工作流关联的清单将被删除。"

 #: awx/api/serializers.py:4405
 msgid "Message type '{}' invalid, must be either 'message' or 'body'"
@@ -3229,7 +3229,7 @@ msgstr "云"
 #: awx/main/models/credential/__init__.py:336
 #: awx/main/models/credential/__init__.py:1113
 msgid "Container Registry"
-msgstr "容器 Registry"
+msgstr "容器注册表"

 #: awx/main/models/credential/__init__.py:337
 msgid "Personal Access Token"
@@ -3560,7 +3560,7 @@ msgstr "身份验证 URL"

 #: awx/main/models/credential/__init__.py:1120
 msgid "Authentication endpoint for the container registry."
-msgstr "容器 registry 的身份验证端点。"
+msgstr "容器注册表的身份验证端点。"

 #: awx/main/models/credential/__init__.py:1130
 msgid "Password or Token"
@@ -3764,7 +3764,7 @@ msgstr "镜像位置"
 msgid ""
 "The full image location, including the container registry, image name, and "
 "version tag."
-msgstr "完整镜像位置,包括容器 registry、镜像名称和版本标签。"
+msgstr "完整镜像位置,包括容器注册表、镜像名称和版本标签。"

 #: awx/main/models/execution_environments.py:51
 msgid "Pull image before running?"
@@ -6238,4 +6238,5 @@ msgstr "%s 当前正在升级。"

 #: awx/ui/urls.py:24
 msgid "This page will refresh when complete."
 msgstr "完成后,此页面会刷新。"
+
@@ -12,7 +12,7 @@ from django.conf import settings
 from django.db.models import Q, Prefetch
 from django.contrib.auth.models import User
 from django.utils.translation import gettext_lazy as _
-from django.core.exceptions import ObjectDoesNotExist
+from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist

 # Django REST Framework
 from rest_framework.exceptions import ParseError, PermissionDenied
@@ -281,13 +281,23 @@ class BaseAccess(object):
         """
         return True

+    def assure_relationship_exists(self, obj, relationship):
+        if '.' in relationship:
+            return  # not attempting validation for complex relationships now
+        try:
+            obj._meta.get_field(relationship)
+        except FieldDoesNotExist:
+            raise NotImplementedError(f'The relationship {relationship} does not exist for model {type(obj)}')
+
     def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
+        self.assure_relationship_exists(obj, relationship)
         if skip_sub_obj_read_check:
             return self.can_change(obj, None)
         else:
             return bool(self.can_change(obj, None) and self.user.can_access(type(sub_obj), 'read', sub_obj))

     def can_unattach(self, obj, sub_obj, relationship, data=None):
+        self.assure_relationship_exists(obj, relationship)
         return self.can_change(obj, data)

     def check_related(self, field, Model, data, role_field='admin_role', obj=None, mandatory=False):
@@ -328,6 +338,8 @@ class BaseAccess(object):
         role = getattr(resource, role_field, None)
         if role is None:
             # Handle special case where resource does not have direct roles
+            if role_field == 'read_role':
+                return self.user.can_access(type(resource), 'read', resource)
             access_method_type = {'admin_role': 'change', 'execute_role': 'start'}[role_field]
             return self.user.can_access(type(resource), access_method_type, resource, None)
         return self.user in role
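Note: assure_relationship_exists turns a mistyped relationship name into a loud NotImplementedError at attach/unattach time instead of a silent pass-through; dotted names are skipped for now. It leans on Django's Model._meta.get_field, which raises FieldDoesNotExist for unknown names. For example, in a Django shell:

    from django.contrib.auth.models import User
    from django.core.exceptions import FieldDoesNotExist

    try:
        User._meta.get_field('no_such_relationship')
    except FieldDoesNotExist:
        print('unknown relationship')  # this branch runs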
@@ -499,6 +511,21 @@ class BaseAccess(object):
         return False


+class UnifiedCredentialsMixin(BaseAccess):
+    """
+    The credentials many-to-many is a standard relationship for JT, jobs, and others
+    Permission to attach is always use permission, and permission to unattach is admin to the parent object
+    """
+
+    @check_superuser
+    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
+        if relationship == 'credentials':
+            if not isinstance(sub_obj, Credential):
+                raise RuntimeError(f'Can only attach credentials to credentials relationship, got {type(sub_obj)}')
+            return self.can_change(obj, None) and (self.user in sub_obj.use_role)
+        return super().can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
+
+
 class NotificationAttachMixin(BaseAccess):
     """For models that can have notifications attached

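Note: UnifiedCredentialsMixin centralizes the rule that attaching a credential requires use permission on the credential plus change permission on the parent, while unattaching falls back to the BaseAccess behavior (admin on the parent). An access class opts in through its bases; the class and model below are hypothetical:

    class ExampleAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
        model = Example  # hypothetical model with a 'credentials' many-to-many

Python's MRO tries NotificationAttachMixin.can_attach first; for non-notification relationships it defers via super() to the credential check, which in turn defers to BaseAccess for everything that is not 'credentials'.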
@@ -534,7 +561,6 @@ class NotificationAttachMixin(BaseAccess):


 class InstanceAccess(BaseAccess):
-
     model = Instance
     prefetch_related = ('rampart_groups',)

@@ -552,7 +578,7 @@ class InstanceAccess(BaseAccess):
         return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, relationship, data=data)

     def can_add(self, data):
-        return False
+        return self.user.is_superuser

     def can_change(self, obj, data):
         return False
@@ -562,7 +588,6 @@ class InstanceAccess(BaseAccess):


 class InstanceGroupAccess(BaseAccess):
-
     model = InstanceGroup
     prefetch_related = ('instances',)

@@ -965,9 +990,6 @@ class HostAccess(BaseAccess):
         if data and 'name' in data:
             self.check_license(add_host_name=data['name'])

-            # Check the per-org limit
-            self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])
-
         # Checks for admin or change permission on inventory, controls whether
         # the user can edit variable data.
         return obj and self.user in obj.inventory.admin_role
@@ -1005,7 +1027,9 @@ class GroupAccess(BaseAccess):
         return Group.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))

     def can_add(self, data):
-        if not data or 'inventory' not in data:
+        if not data:  # So the browseable API will work
+            return Inventory.accessible_objects(self.user, 'admin_role').exists()
+        if 'inventory' not in data:
             return False
         # Checks for admin or change permission on inventory.
         return self.check_related('inventory', Inventory, data)
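Note: GroupAccess.can_add now distinguishes "no data at all" (the browsable API probing whether to render a POST form) from "data missing the inventory key" (a real, invalid request). Roughly, the call shapes behave like this:

    access = GroupAccess(user)
    access.can_add(None)               # form rendering: True iff the user admins any inventory
    access.can_add({'name': 'web'})    # real request without 'inventory': False
    access.can_add({'inventory': 42})  # delegates to check_related('inventory', ...)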
@@ -1031,7 +1055,7 @@ class GroupAccess(BaseAccess):
         return bool(obj and self.user in obj.inventory.admin_role)


-class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
+class InventorySourceAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
     """
     I can see inventory sources whenever I can see their inventory.
     I can change inventory sources whenever I can change their inventory.
@@ -1075,18 +1099,6 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
             return self.user in obj.inventory.update_role
         return False

-    @check_superuser
-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return obj and obj.inventory and self.user in obj.inventory.admin_role and self.user in sub_obj.use_role
-        return super(InventorySourceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-
-    @check_superuser
-    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return obj and obj.inventory and self.user in obj.inventory.admin_role
-        return super(InventorySourceAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
-

 class InventoryUpdateAccess(BaseAccess):
     """
@@ -1485,7 +1497,7 @@ class ProjectUpdateAccess(BaseAccess):
         return obj and self.user in obj.project.admin_role


-class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
+class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
     """
     I can see job templates when:
     - I have read role for the job template.
@@ -1549,8 +1561,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
             if self.user not in inventory.use_role:
                 return False

-        ee = get_value(ExecutionEnvironment, 'execution_environment')
-        if ee and not self.user.can_access(ExecutionEnvironment, 'read', ee):
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
             return False

         project = get_value(Project, 'project')
@@ -1600,10 +1611,8 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if self.changes_are_non_sensitive(obj, data):
             return True

-        if data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role'):
+            return False

         for required_field, cls in (('inventory', Inventory), ('project', Project)):
             is_mandatory = True
@@ -1667,17 +1676,13 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
             if not obj.organization:
                 return False
             return self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return self.user in obj.admin_role and self.user in sub_obj.use_role
         return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)

     @check_superuser
     def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
         if relationship == "instance_groups":
             return self.can_attach(obj, sub_obj, relationship, *args, **kwargs)
-        if relationship == 'credentials' and isinstance(sub_obj, Credential):
-            return self.user in obj.admin_role
-        return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
+        return super(JobTemplateAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)


 class JobAccess(BaseAccess):
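Note: the JobTemplateAccess hunks above replace the hand-rolled execution environment read test with check_related(..., role_field='read_role'), which also copes with an unchanged value on an existing object via obj=obj. The two call shapes now in use, excerpted:

    # on create: validate the EE named in the incoming data
    self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
    # on update: a value equal to the current obj.execution_environment is also allowed
    self.check_related('execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role')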
@@ -1824,7 +1829,7 @@ class SystemJobAccess(BaseAccess):
         return False  # no relaunching of system jobs


-class JobLaunchConfigAccess(BaseAccess):
+class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     Launch configs must have permissions checked for
     - relaunching
@@ -1832,63 +1837,69 @@ class JobLaunchConfigAccess(BaseAccess):

     In order to create a new object with a copy of this launch config, I need:
     - use access to related inventory (if present)
+    - read access to Execution Environment (if present), unless the specified ee is already in the template
     - use role to many-related credentials (if any present)
+    - read access to many-related labels (if any present), unless the specified label is already in the template
+    - read access to many-related instance groups (if any present), unless the specified instance group is already in the template
     """

     model = JobLaunchConfig
     select_related = 'job'
     prefetch_related = ('credentials', 'inventory')

-    def _unusable_creds_exist(self, qs):
-        return qs.exclude(pk__in=Credential._accessible_pk_qs(Credential, self.user, 'use_role')).exists()
+    M2M_CHECKS = {'credentials': Credential, 'labels': Label, 'instance_groups': InstanceGroup}

-    def has_credentials_access(self, obj):
-        # user has access if no related credentials exist that the user lacks use role for
-        return not self._unusable_creds_exist(obj.credentials)
+    def _related_filtered_queryset(self, cls):
+        if cls is Label:
+            return LabelAccess(self.user).filtered_queryset()
+        elif cls is InstanceGroup:
+            return InstanceGroupAccess(self.user).filtered_queryset()
+        else:
+            return cls._accessible_pk_qs(cls, self.user, 'use_role')
+
+    def has_obj_m2m_access(self, obj):
+        for relationship, cls in self.M2M_CHECKS.items():
+            if getattr(obj, relationship).exclude(pk__in=self._related_filtered_queryset(cls)).exists():
+                return False
+        return True

     @check_superuser
     def can_add(self, data, template=None):
         # This is a special case, we don't check related many-to-many elsewhere
         # launch RBAC checks use this
-        if 'credentials' in data and data['credentials'] or 'reference_obj' in data:
-            if 'reference_obj' in data:
-                prompted_cred_qs = data['reference_obj'].credentials.all()
-            else:
-                # If given model objects, only use the primary key from them
-                cred_pks = [cred.pk for cred in data['credentials']]
-                if template:
-                    for cred in template.credentials.all():
-                        if cred.pk in cred_pks:
-                            cred_pks.remove(cred.pk)
-                prompted_cred_qs = Credential.objects.filter(pk__in=cred_pks)
-            if self._unusable_creds_exist(prompted_cred_qs):
+        if 'reference_obj' in data:
+            if not self.has_obj_m2m_access(data['reference_obj']):
                 return False
-        return self.check_related('inventory', Inventory, data, role_field='use_role')
+        else:
+            for relationship, cls in self.M2M_CHECKS.items():
+                if relationship in data and data[relationship]:
+                    # If given model objects, only use the primary key from them
+                    sub_obj_pks = [sub_obj.pk for sub_obj in data[relationship]]
+                    if template:
+                        for sub_obj in getattr(template, relationship).all():
+                            if sub_obj.pk in sub_obj_pks:
+                                sub_obj_pks.remove(sub_obj.pk)
+                    if cls.objects.filter(pk__in=sub_obj_pks).exclude(pk__in=self._related_filtered_queryset(cls)).exists():
+                        return False
+        return self.check_related('inventory', Inventory, data, role_field='use_role') and self.check_related(
+            'execution_environment', ExecutionEnvironment, data, role_field='read_role'
+        )

     @check_superuser
     def can_use(self, obj):
-        return self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True) and self.has_credentials_access(obj)
+        return (
+            self.has_obj_m2m_access(obj)
+            and self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True)
+            and self.check_related('execution_environment', ExecutionEnvironment, {}, obj=obj, role_field='read_role')
+        )

     def can_change(self, obj, data):
-        return self.check_related('inventory', Inventory, data, obj=obj, role_field='use_role')
-
-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if isinstance(sub_obj, Credential) and relationship == 'credentials':
-            return self.user in sub_obj.use_role
-        else:
-            raise NotImplementedError('Only credentials can be attached to launch configurations.')
-
-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if isinstance(sub_obj, Credential) and relationship == 'credentials':
-            if skip_sub_obj_read_check:
-                return True
-            else:
-                return self.user in sub_obj.read_role
-        else:
-            raise NotImplementedError('Only credentials can be attached to launch configurations.')
+        return self.check_related('inventory', Inventory, data, obj=obj, role_field='use_role') and self.check_related(
+            'execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role'
+        )


-class WorkflowJobTemplateNodeAccess(BaseAccess):
+class WorkflowJobTemplateNodeAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     I can see/use a WorkflowJobTemplateNode if I have read permission
     to associated Workflow Job Template
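Note: the JobLaunchConfigAccess rewrite generalizes what used to be a credentials-only prompt check into a table-driven check over three many-to-many relationships. Stripped of the Django plumbing, the driving loop looks like this (accessible_pks stands in for the per-class filtered querysets and is an assumption of this sketch):

    M2M_CHECKS = {'credentials': Credential, 'labels': Label, 'instance_groups': InstanceGroup}

    def has_obj_m2m_access(obj, accessible_pks):
        # deny when any related row falls outside what this user may use/see
        for relationship, cls in M2M_CHECKS.items():
            if getattr(obj, relationship).exclude(pk__in=accessible_pks[cls]).exists():
                return False
        return True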
@@ -1911,7 +1922,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
     """

     model = WorkflowJobTemplateNode
-    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'credentials', 'workflow_job_template')
+    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'workflow_job_template')

     def filtered_queryset(self):
         return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.accessible_objects(self.user, 'read_role'))
@@ -1923,7 +1934,8 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
         return (
             self.check_related('workflow_job_template', WorkflowJobTemplate, data, mandatory=True)
             and self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role')
-            and JobLaunchConfigAccess(self.user).can_add(data)
+            and self.check_related('inventory', Inventory, data, role_field='use_role')
+            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
         )

     def wfjt_admin(self, obj):
@@ -1932,17 +1944,14 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
         else:
             return self.user in obj.workflow_job_template.admin_role

-    def ujt_execute(self, obj):
+    def ujt_execute(self, obj, data=None):
         if not obj.unified_job_template:
             return True
-        return self.check_related('unified_job_template', UnifiedJobTemplate, {}, obj=obj, role_field='execute_role', mandatory=True)
+        return self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True)

     def can_change(self, obj, data):
-        if not data:
-            return True
-
         # should not be able to edit the prompts if lacking access to UJT or WFJT
-        return self.ujt_execute(obj) and self.wfjt_admin(obj) and JobLaunchConfigAccess(self.user).can_change(obj, data)
+        return self.ujt_execute(obj, data=data) and self.wfjt_admin(obj) and JobLaunchConfigAccess(self.user).can_change(obj, data)

     def can_delete(self, obj):
         return self.wfjt_admin(obj)
@@ -1955,29 +1964,14 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
         return True

     def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if not self.wfjt_admin(obj):
-            return False
-        if relationship == 'credentials':
-            # Need permission to related template to attach a credential
-            if not self.ujt_execute(obj):
-                return False
-            return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-        elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
-            return self.check_same_WFJT(obj, sub_obj)
-        else:
-            raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
+        if relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
+            return self.wfjt_admin(obj) and self.check_same_WFJT(obj, sub_obj)
+        return super().can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)

-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        if not self.wfjt_admin(obj):
-            return False
-        if relationship == 'credentials':
-            if not self.ujt_execute(obj):
-                return False
-            return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-        elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
-            return self.check_same_WFJT(obj, sub_obj)
-        else:
-            raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
+    def can_unattach(self, obj, sub_obj, relationship, data=None):
+        if relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
+            return self.wfjt_admin(obj)
+        return super().can_unattach(obj, sub_obj, relationship, data=None)


 class WorkflowJobNodeAccess(BaseAccess):
@@ -2052,13 +2046,10 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if not data:  # So the browseable API will work
             return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-        if data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
-
-        return self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and self.check_related(
-            'inventory', Inventory, data, role_field='use_role'
+        return bool(
+            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
+            and self.check_related('inventory', Inventory, data, role_field='use_role')
+            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
         )

     def can_copy(self, obj):
@@ -2104,14 +2095,10 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if self.user.is_superuser:
             return True

-        if data and data.get('execution_environment'):
-            ee = get_object_from_data('execution_environment', ExecutionEnvironment, data)
-            if not self.user.can_access(ExecutionEnvironment, 'read', ee):
-                return False
-
         return (
             self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj)
             and self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj)
+            and self.check_related('execution_environment', ExecutionEnvironment, data, obj=obj, role_field='read_role')
             and self.user in obj.admin_role
         )

@@ -2364,7 +2351,6 @@ class JobEventAccess(BaseAccess):


 class UnpartitionedJobEventAccess(JobEventAccess):
-
     model = UnpartitionedJobEvent


@@ -2518,7 +2504,7 @@ class UnifiedJobAccess(BaseAccess):
         return super(UnifiedJobAccess, self).get_queryset().filter(workflowapproval__isnull=True)


-class ScheduleAccess(BaseAccess):
+class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
     """
     I can see a schedule if I can see it's related unified job, I can create them or update them if I have write access
     """
@@ -2559,12 +2545,6 @@ class ScheduleAccess(BaseAccess):
     def can_delete(self, obj):
         return self.can_change(obj, {})

-    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-
-    def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
-

 class NotificationTemplateAccess(BaseAccess):
     """
@@ -2715,46 +2695,66 @@ class ActivityStreamAccess(BaseAccess):
|
|||||||
# 'job_template', 'job', 'project', 'project_update', 'workflow_job',
|
# 'job_template', 'job', 'project', 'project_update', 'workflow_job',
|
||||||
# 'inventory_source', 'workflow_job_template'
|
# 'inventory_source', 'workflow_job_template'
|
||||||
|
|
||||||
inventory_set = Inventory.accessible_objects(self.user, 'read_role')
|
q = Q(user=self.user)
|
||||||
credential_set = Credential.accessible_objects(self.user, 'read_role')
|
inventory_set = Inventory.accessible_pk_qs(self.user, 'read_role')
|
||||||
|
if inventory_set:
|
||||||
|
q |= (
|
||||||
|
Q(ad_hoc_command__inventory__in=inventory_set)
|
||||||
|
| Q(inventory__in=inventory_set)
|
||||||
|
| Q(host__inventory__in=inventory_set)
|
||||||
|
| Q(group__inventory__in=inventory_set)
|
||||||
|
| Q(inventory_source__inventory__in=inventory_set)
|
||||||
|
| Q(inventory_update__inventory_source__inventory__in=inventory_set)
|
||||||
|
)
|
||||||
|
|
||||||
|
credential_set = Credential.accessible_pk_qs(self.user, 'read_role')
|
||||||
|
if credential_set:
|
||||||
|
q |= Q(credential__in=credential_set)
|
||||||
|
|
||||||
auditing_orgs = (
|
auditing_orgs = (
|
||||||
(Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
|
(Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
|
||||||
.distinct()
|
.distinct()
|
||||||
.values_list('id', flat=True)
|
.values_list('id', flat=True)
|
||||||
)
|
)
|
||||||
project_set = Project.accessible_objects(self.user, 'read_role')
|
if auditing_orgs:
|
||||||
jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
|
q |= (
|
||||||
team_set = Team.accessible_objects(self.user, 'read_role')
|
Q(user__in=auditing_orgs.values('member_role__members'))
|
||||||
wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
|
| Q(organization__in=auditing_orgs)
|
||||||
app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
|
| Q(notification_template__organization__in=auditing_orgs)
|
||||||
token_set = OAuth2TokenAccess(self.user).filtered_queryset()
|
| Q(notification__notification_template__organization__in=auditing_orgs)
|
||||||
|
| Q(label__organization__in=auditing_orgs)
|
||||||
|
| Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
|
||||||
|
)
|
||||||
|
|
||||||
return qs.filter(
|
project_set = Project.accessible_pk_qs(self.user, 'read_role')
|
||||||
Q(ad_hoc_command__inventory__in=inventory_set)
|
if project_set:
|
||||||
| Q(o_auth2_application__in=app_set)
|
q |= Q(project__in=project_set) | Q(project_update__project__in=project_set)
|
||||||
| Q(o_auth2_access_token__in=token_set)
|
|
||||||
| Q(user__in=auditing_orgs.values('member_role__members'))
|
jt_set = JobTemplate.accessible_pk_qs(self.user, 'read_role')
|
||||||
| Q(user=self.user)
|
if jt_set:
|
||||||
| Q(organization__in=auditing_orgs)
|
q |= Q(job_template__in=jt_set) | Q(job__job_template__in=jt_set)
|
||||||
| Q(inventory__in=inventory_set)
|
|
||||||
| Q(host__inventory__in=inventory_set)
|
wfjt_set = WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role')
|
||||||
| Q(group__inventory__in=inventory_set)
|
if wfjt_set:
|
||||||
| Q(inventory_source__inventory__in=inventory_set)
|
q |= (
|
||||||
| Q(inventory_update__inventory_source__inventory__in=inventory_set)
|
Q(workflow_job_template__in=wfjt_set)
|
||||||
| Q(credential__in=credential_set)
|
| Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
|
||||||
| Q(team__in=team_set)
|
| Q(workflow_job__workflow_job_template__in=wfjt_set)
|
||||||
| Q(project__in=project_set)
|
)
|
||||||
| Q(project_update__project__in=project_set)
|
|
||||||
| Q(job_template__in=jt_set)
|
team_set = Team.accessible_pk_qs(self.user, 'read_role')
|
||||||
| Q(job__job_template__in=jt_set)
|
if team_set:
|
||||||
| Q(workflow_job_template__in=wfjt_set)
|
q |= Q(team__in=team_set)
|
||||||
| Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
|
|
||||||
| Q(workflow_job__workflow_job_template__in=wfjt_set)
|
app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
|
||||||
| Q(notification_template__organization__in=auditing_orgs)
|
if app_set:
|
||||||
| Q(notification__notification_template__organization__in=auditing_orgs)
|
q |= Q(o_auth2_application__in=app_set)
|
||||||
| Q(label__organization__in=auditing_orgs)
|
|
||||||
| Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
|
token_set = OAuth2TokenAccess(self.user).filtered_queryset()
|
||||||
).distinct()
|
if token_set:
|
||||||
|
q |= Q(o_auth2_access_token__in=token_set)
|
||||||
|
|
||||||
|
return qs.filter(q).distinct()
|
||||||
|
|
||||||
def can_add(self, data):
|
def can_add(self, data):
|
||||||
return False
|
return False
|
||||||
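The reworked filtered_queryset above builds one Q object incrementally and ORs in a clause only when the corresponding accessible set is non-empty, so the generated SQL carries only subqueries that can actually match. A minimal sketch of that pattern; the model fields here are illustrative, not the real AWX schema:

```python
# Hedged sketch: OR subquery clauses into a single Q, skipping empty sets.
from django.db.models import Q

def visible_entries_q(user, inventory_pks, team_pks):
    q = Q(user=user)  # a user can always see their own records
    if inventory_pks:  # an empty accessible set contributes no subquery at all
        q |= Q(inventory__in=inventory_pks) | Q(host__inventory__in=inventory_pks)
    if team_pks:
        q |= Q(team__in=team_pks)
    return q
```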
@@ -1,8 +1,8 @@
 import datetime
 import asyncio
 import logging
-import aioredis
 import redis
+import redis.asyncio
 import re
 
 from prometheus_client import (

@@ -82,7 +82,7 @@ class BroadcastWebsocketStatsManager:
 
     async def run_loop(self):
         try:
-            redis_conn = await aioredis.create_redis_pool(settings.BROKER_URL)
+            redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
             while True:
                 stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
                 await redis_conn.set(self._redis_key, stats_data_str)

@@ -122,8 +122,8 @@ class BroadcastWebsocketStats:
             'Number of messages received, to be forwarded, by the broadcast websocket system',
             registry=self._registry,
         )
-        self._messages_received = Gauge(
-            f'awx_{self.remote_name}_messages_received',
+        self._messages_received_current_conn = Gauge(
+            f'awx_{self.remote_name}_messages_received_currrent_conn',
             'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
             registry=self._registry,
         )

@@ -144,13 +144,13 @@ class BroadcastWebsocketStats:
 
     def record_message_received(self):
         self._internal_messages_received_per_minute.record()
-        self._messages_received.inc()
+        self._messages_received_current_conn.inc()
         self._messages_received_total.inc()
 
     def record_connection_established(self):
         self._connection.state('connected')
         self._connection_start.set_to_current_time()
-        self._messages_received.set(0)
+        self._messages_received_current_conn.set(0)
 
     def record_connection_lost(self):
         self._connection.state('disconnected')
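The hunks above replace the archived aioredis 1.x client with the asyncio client bundled in redis-py. A minimal standalone sketch of the new-style setup, assuming redis-py >= 4.2 and an illustrative local URL:

```python
import asyncio
import redis.asyncio  # bundled asyncio client, replaces the separate aioredis package

async def main():
    # Awaiting the client returned by from_url() initializes its connection pool,
    # which keeps the old `await aioredis.create_redis_pool(url)` call shape.
    conn = await redis.asyncio.Redis.from_url('redis://localhost:6379')
    await conn.set('example_key', 'example_value')
    print(await conn.get('example_key'))
    await conn.close()

asyncio.run(main())
```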
@@ -16,6 +16,7 @@ from awx.conf.license import get_license
 from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook
 from awx.main import models
 from awx.main.analytics import register
+from awx.main.scheduler.task_manager_models import TaskManagerModels
 
 """
 This module is used to define metrics collected by awx.main.analytics.gather()

@@ -235,25 +236,25 @@ def projects_by_scm_type(since, **kwargs):
 @register('instance_info', '1.2', description=_('Cluster topology and capacity'))
 def instance_info(since, include_hostnames=False, **kwargs):
     info = {}
-    instances = models.Instance.objects.values_list('hostname').values(
-        'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'enabled'
-    )
-    for instance in instances:
-        consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
+    # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
+    tm_models = TaskManagerModels.init_with_consumed_capacity(instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
+    for tm_instance in tm_models.instances.instances_by_hostname.values():
+        instance = tm_instance.obj
+
         instance_info = {
-            'uuid': instance['uuid'],
-            'version': instance['version'],
-            'capacity': instance['capacity'],
-            'cpu': instance['cpu'],
-            'memory': instance['memory'],
-            'managed_by_policy': instance['managed_by_policy'],
-            'enabled': instance['enabled'],
-            'consumed_capacity': consumed_capacity,
-            'remaining_capacity': instance['capacity'] - consumed_capacity,
+            'uuid': instance.uuid,
+            'version': instance.version,
+            'capacity': instance.capacity,
+            'cpu': instance.cpu,
+            'memory': instance.memory,
+            'managed_by_policy': instance.managed_by_policy,
+            'enabled': instance.enabled,
+            'consumed_capacity': tm_instance.consumed_capacity,
+            'remaining_capacity': instance.capacity - tm_instance.consumed_capacity,
+            'node_type': instance.node_type,
         }
         if include_hostnames is True:
-            instance_info['hostname'] = instance['hostname']
-        info[instance['uuid']] = instance_info
+            instance_info['hostname'] = instance.hostname
+        info[instance.uuid] = instance_info
     return info

@@ -396,7 +397,7 @@ def events_table_partitioned_modified(since, full_path, until, **kwargs):
     return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs)
 
 
-@register('unified_jobs_table', '1.3', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing)
+@register('unified_jobs_table', '1.4', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing)
 def unified_jobs_table(since, full_path, until, **kwargs):
     unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                   main_unifiedjob.polymorphic_ctype_id,

@@ -422,7 +423,8 @@ def unified_jobs_table(since, full_path, until, **kwargs):
                   main_unifiedjob.job_explanation,
                   main_unifiedjob.instance_group_id,
                   main_unifiedjob.installed_collections,
-                  main_unifiedjob.ansible_version
+                  main_unifiedjob.ansible_version,
+                  main_job.forks
                   FROM main_unifiedjob
                   JOIN django_content_type ON main_unifiedjob.polymorphic_ctype_id = django_content_type.id
                   LEFT JOIN main_job ON main_unifiedjob.id = main_job.unifiedjob_ptr_id
@@ -3,6 +3,7 @@ from prometheus_client import CollectorRegistry, Gauge, Info, generate_latest
 
 from awx.conf.license import get_license
 from awx.main.utils import get_awx_version
+from awx.main.models import UnifiedJob
 from awx.main.analytics.collectors import (
     counts,
     instance_info,

@@ -56,6 +57,7 @@ def metrics():
     [
         'hostname',
         'instance_uuid',
+        'node_type',
     ],
     registry=REGISTRY,
 )

@@ -83,6 +85,7 @@ def metrics():
     [
         'hostname',
         'instance_uuid',
+        'node_type',
     ],
     registry=REGISTRY,
 )

@@ -110,6 +113,7 @@ def metrics():
     [
         'hostname',
         'instance_uuid',
+        'node_type',
     ],
     registry=REGISTRY,
 )

@@ -119,6 +123,7 @@ def metrics():
     [
         'hostname',
         'instance_uuid',
+        'node_type',
     ],
     registry=REGISTRY,
 )

@@ -169,8 +174,9 @@ def metrics():
 
     all_job_data = job_counts(None)
     statuses = all_job_data.get('status', {})
-    for status, value in statuses.items():
-        STATUS.labels(status=status).set(value)
+    states = set(dict(UnifiedJob.STATUS_CHOICES).keys()) - set(['new'])
+    for state in states:
+        STATUS.labels(status=state).set(statuses.get(state, 0))
 
     RUNNING_JOBS.set(current_counts['running_jobs'])
     PENDING_JOBS.set(current_counts['pending_jobs'])

@@ -178,12 +184,13 @@ def metrics():
     instance_data = instance_info(None, include_hostnames=True)
     for uuid, info in instance_data.items():
         hostname = info['hostname']
-        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
+        node_type = info['node_type']
+        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['capacity'])
         INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
         INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
-        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
-        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
-        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
+        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['consumed_capacity'])
+        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['remaining_capacity'])
+        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).info(
             {
                 'enabled': str(instance_data[uuid]['enabled']),
                 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
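The `states` loop above writes a value for every known job status rather than only the statuses present in the current data, so each label series exists continuously and reads as an explicit zero instead of a gap. A runnable sketch of the idea, with made-up status names and counts:

```python
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()
status_gauge = Gauge('example_jobs_by_status', 'Jobs by status', ['status'], registry=registry)

observed = {'successful': 12, 'failed': 3}  # only statuses present right now
all_states = {'pending', 'waiting', 'running', 'successful', 'failed', 'error', 'canceled'}

# Seeding every state keeps series from appearing and disappearing between
# scrapes, which would otherwise look like missing data to rate() and alerts.
for state in all_states:
    status_gauge.labels(status=state).set(observed.get(state, 0))

print(generate_latest(registry).decode())
```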
@@ -5,7 +5,9 @@ import logging
 
 from django.conf import settings
 from django.apps import apps
+
 from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing
 
 root_key = 'awx_metrics'
 logger = logging.getLogger('awx.main.analytics')

@@ -163,10 +165,10 @@ class Metrics:
         Instance = apps.get_model('main', 'Instance')
         if instance_name:
             self.instance_name = instance_name
-        elif settings.IS_TESTING():
+        elif is_testing():
             self.instance_name = "awx_testing"
         else:
-            self.instance_name = Instance.objects.me().hostname
+            self.instance_name = Instance.objects.my_hostname()
 
         # metric name, help_text
         METRICSLIST = [

@@ -184,19 +186,29 @@ class Metrics:
             FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'),
             IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'),
             FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'),
-            SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading all tasks from db'),
+            SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading tasks from db'),
             SetFloatM('task_manager_start_task_seconds', 'Time spent starting task'),
             SetFloatM('task_manager_process_running_tasks_seconds', 'Time spent processing running tasks'),
             SetFloatM('task_manager_process_pending_tasks_seconds', 'Time spent processing pending tasks'),
-            SetFloatM('task_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
-            SetFloatM('task_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow jobs'),
             SetFloatM('task_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
-            IntM('task_manager_schedule_calls', 'Number of calls to task manager schedule'),
+            IntM('task_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
             SetFloatM('task_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
             SetIntM('task_manager_tasks_started', 'Number of tasks started'),
             SetIntM('task_manager_running_processed', 'Number of running tasks processed'),
             SetIntM('task_manager_pending_processed', 'Number of pending tasks processed'),
             SetIntM('task_manager_tasks_blocked', 'Number of tasks blocked from running'),
+            SetFloatM('task_manager_commit_seconds', 'Time spent in db transaction, including on_commit calls'),
+            SetFloatM('dependency_manager_get_tasks_seconds', 'Time spent loading pending tasks from db'),
+            SetFloatM('dependency_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
+            SetFloatM('dependency_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+            IntM('dependency_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+            SetFloatM('dependency_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+            SetIntM('dependency_manager_pending_processed', 'Number of pending tasks processed'),
+            SetFloatM('workflow_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+            IntM('workflow_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+            SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+            SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
+            SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
         ]
         # turn metric list into dictionary with the metric name as a key
         self.METRICS = {}

@@ -303,7 +315,12 @@ class Metrics:
                 self.previous_send_metrics.set(current_time)
                 self.previous_send_metrics.store_value(self.conn)
         finally:
-            lock.release()
+            try:
+                lock.release()
+            except Exception as exc:
+                # After system failures, we might throw redis.exceptions.LockNotOwnedError
+                # this is to avoid print a Traceback, and importantly, avoid raising an exception into parent context
+                logger.warning(f'Error releasing subsystem metrics redis lock, error: {str(exc)}')
 
     def load_other_metrics(self, request):
         # data received from other nodes are stored in their own keys
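The guarded release above keeps a redis.exceptions.LockNotOwnedError raised in the finally block from propagating when the lock expired during the critical section. The same pattern in isolation, assuming redis-py; connection details are illustrative:

```python
import logging
import redis

logger = logging.getLogger(__name__)
conn = redis.Redis()  # illustrative connection

lock = conn.lock('example_lock', timeout=30)
if lock.acquire(blocking=False):
    try:
        ...  # critical section; may outlive the lock timeout under load
    finally:
        try:
            lock.release()
        except redis.exceptions.LockError as exc:
            # LockNotOwnedError subclasses LockError: the lock expired and was
            # re-acquired elsewhere, so log instead of raising to the caller.
            logger.warning('Error releasing redis lock: %s', exc)
```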
@@ -3,6 +3,5 @@ from django.utils.translation import gettext_lazy as _
 
 
 class MainConfig(AppConfig):
-
     name = 'awx.main'
     verbose_name = _('Main')
@@ -446,7 +446,7 @@ register(
     label=_('Default Job Idle Timeout'),
     help_text=_(
         'If no output is detected from ansible in this number of seconds the execution will be terminated. '
-        'Use value of 0 to used default idle_timeout is 600s.'
+        'Use value of 0 to indicate that no idle timeout should be imposed.'
     ),
     category=_('Jobs'),
     category_slug='jobs',

@@ -569,7 +569,7 @@ register(
 register(
     'LOG_AGGREGATOR_LOGGERS',
     field_class=fields.StringListField,
-    default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
+    default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket'],
     label=_('Loggers Sending Data to Log Aggregator Form'),
     help_text=_(
         'List of loggers that will send HTTP logs to the collector, these can '

@@ -577,7 +577,8 @@ register(
         'awx - service logs\n'
         'activity_stream - activity stream records\n'
         'job_events - callback data from Ansible job events\n'
-        'system_tracking - facts gathered from scan jobs.'
+        'system_tracking - facts gathered from scan jobs\n'
+        'broadcast_websocket - errors pertaining to websockets broadcast metrics\n'
     ),
     category=_('Logging'),
     category_slug='logging',
@@ -9,10 +9,16 @@ aim_inputs = {
     'fields': [
         {
             'id': 'url',
-            'label': _('CyberArk AIM URL'),
+            'label': _('CyberArk CCP URL'),
             'type': 'string',
             'format': 'url',
         },
+        {
+            'id': 'webservice_id',
+            'label': _('Web Service ID'),
+            'type': 'string',
+            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
+        },
         {
             'id': 'app_id',
             'label': _('Application ID'),

@@ -64,10 +70,13 @@ def aim_backend(**kwargs):
     client_cert = kwargs.get('client_cert', None)
     client_key = kwargs.get('client_key', None)
     verify = kwargs['verify']
+    webservice_id = kwargs['webservice_id']
     app_id = kwargs['app_id']
     object_query = kwargs['object_query']
     object_query_format = kwargs['object_query_format']
     reason = kwargs.get('reason', None)
+    if webservice_id == '':
+        webservice_id = 'AIMWebService'
 
     query_params = {
         'AppId': app_id,

@@ -78,7 +87,7 @@ def aim_backend(**kwargs):
         query_params['reason'] = reason
 
     request_qs = '?' + urlencode(query_params, quote_via=quote)
-    request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))
+    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))
 
     with CertFiles(client_cert, client_key) as cert:
         res = requests.get(

@@ -92,4 +101,4 @@ def aim_backend(**kwargs):
     return res.json()['Content']
 
 
-aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
+aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
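With the new webservice_id input, the lookup URL swaps out the previously hard-coded AIMWebService path segment. A quick stdlib check of the resulting URLs, with made-up host and IDs:

```python
from urllib.parse import urljoin

url = 'https://ccp.example.com'
for webservice_id in ('AIMWebService', 'MyCustomCCP'):
    print(urljoin(url, '/'.join([webservice_id, 'api', 'Accounts'])))
# https://ccp.example.com/AIMWebService/api/Accounts
# https://ccp.example.com/MyCustomCCP/api/Accounts
```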
@@ -1,6 +1,5 @@
 from .plugin import CredentialPlugin, CertFiles, raise_for_status
 
-import base64
 from urllib.parse import urljoin, quote
 
 from django.utils.translation import gettext_lazy as _

@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
     cacert = kwargs.get('cacert', None)
 
     auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain'},
+        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
         'data': api_key,
         'allow_redirects': False,
     }

@@ -69,9 +68,13 @@ def conjur_backend(**kwargs):
     with CertFiles(cacert) as cert:
         # https://www.conjur.org/api.html#authentication-authenticate-post
         auth_kwargs['verify'] = cert
-        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+        try:
+            resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+            resp.raise_for_status()
+        except requests.exceptions.HTTPError:
+            resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
     raise_for_status(resp)
-    token = base64.b64encode(resp.content).decode('utf-8')
+    token = resp.content.decode('utf-8')
 
     lookup_kwargs = {
         'headers': {'Authorization': 'Token token="{}"'.format(token)},

@@ -80,14 +83,21 @@ def conjur_backend(**kwargs):
 
     # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
     path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+    path_conjurcloud = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
     if version:
-        path = '?'.join([path, version])
+        ver = "version={}".format(version)
+        path = '?'.join([path, ver])
+        path_conjurcloud = '?'.join([path_conjurcloud, ver])
 
     with CertFiles(cacert) as cert:
         lookup_kwargs['verify'] = cert
-        resp = requests.get(path, timeout=30, **lookup_kwargs)
+        try:
+            resp = requests.get(path, timeout=30, **lookup_kwargs)
+            resp.raise_for_status()
+        except requests.exceptions.HTTPError:
+            resp = requests.get(path_conjurcloud, timeout=30, **lookup_kwargs)
     raise_for_status(resp)
     return resp.text
 
 
-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
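Both requests in the hunks above now try the self-hosted Conjur path first and, on an HTTP error, fall back to the Conjur Cloud path prefixed with /api. The shared shape of that fallback, as a hypothetical helper rather than code from the diff:

```python
import requests

def get_with_fallback(primary_url, fallback_url, **kwargs):
    """Try primary_url; on an HTTP error status, retry once against fallback_url."""
    try:
        resp = requests.get(primary_url, timeout=30, **kwargs)
        resp.raise_for_status()
    except requests.exceptions.HTTPError:
        resp = requests.get(fallback_url, timeout=30, **kwargs)
    return resp
```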
@@ -1,6 +1,7 @@
 import copy
 import os
 import pathlib
+import time
 from urllib.parse import urljoin
 
 from .plugin import CredentialPlugin, CertFiles, raise_for_status

@@ -247,7 +248,15 @@ def kv_backend(**kwargs):
     request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')
     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        response = sess.get(request_url, **request_kwargs)
+        request_retries = 0
+        while request_retries < 5:
+            response = sess.get(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if response.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(response)
 
     json = response.json()

@@ -289,8 +298,15 @@ def ssh_backend(**kwargs):
 
     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        resp = sess.post(request_url, **request_kwargs)
+        request_retries = 0
+        while request_retries < 5:
+            resp = sess.post(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if resp.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(resp)
     return resp.json()['data']['signed_key']
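The retry loop is duplicated verbatim in kv_backend and ssh_backend; a shared helper along these lines (hypothetical, not part of the diff) captures the same behavior, retrying the HTTP 412 that Vault Enterprise replicas return until reads become consistent:

```python
import time
import requests

def request_with_consistency_retries(sess: requests.Session, method, url, retries=5, **kwargs):
    for _ in range(retries):
        resp = sess.request(method, url, **kwargs)
        # https://developer.hashicorp.com/vault/docs/enterprise/consistency
        if resp.status_code != 412:
            break
        time.sleep(1)  # give replication a moment before retrying
    return resp
```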
@@ -4,6 +4,7 @@ import select
 from contextlib import contextmanager
 
 from django.conf import settings
+from django.db import connection as pg_connection
 
 
 NOT_READY = ([], [], [])

@@ -15,7 +16,6 @@ def get_local_queuename():
 
 class PubSub(object):
     def __init__(self, conn):
-        assert conn.autocommit, "Connection must be in autocommit mode."
         self.conn = conn
 
     def listen(self, channel):

@@ -31,6 +31,9 @@ class PubSub(object):
             cur.execute('SELECT pg_notify(%s, %s);', (channel, payload))
 
     def events(self, select_timeout=5, yield_timeouts=False):
+        if not self.conn.autocommit:
+            raise RuntimeError('Listening for events can only be done in autocommit mode')
+
         while True:
             if select.select([self.conn], [], [], select_timeout) == NOT_READY:
                 if yield_timeouts:

@@ -45,11 +48,32 @@ class PubSub(object):
 
 
 @contextmanager
-def pg_bus_conn():
-    conf = settings.DATABASES['default']
-    conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {}))
-    # Django connection.cursor().connection doesn't have autocommit=True on
-    conn.set_session(autocommit=True)
+def pg_bus_conn(new_connection=False):
+    '''
+    Any listeners probably want to establish a new database connection,
+    separate from the Django connection used for queries, because that will prevent
+    losing connection to the channel whenever a .close() happens.
+
+    Any publishers probably want to use the existing connection
+    so that messages follow postgres transaction rules
+    https://www.postgresql.org/docs/current/sql-notify.html
+    '''
+
+    if new_connection:
+        conf = settings.DATABASES['default']
+        conn = psycopg2.connect(
+            dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {})
+        )
+        # Django connection.cursor().connection doesn't have autocommit=True on by default
+        conn.set_session(autocommit=True)
+    else:
+        if pg_connection.connection is None:
+            pg_connection.connect()
+        if pg_connection.connection is None:
+            raise RuntimeError('Unexpectedly could not connect to postgres for pg_notify actions')
+        conn = pg_connection.connection
 
     pubsub = PubSub(conn)
     yield pubsub
-    conn.close()
+    if new_connection:
+        conn.close()
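A usage sketch for the new new_connection flag; the channel name is illustrative, and this assumes a configured Django/AWX environment:

```python
from awx.main.dispatch import pg_bus_conn

# Publishers reuse Django's connection, so NOTIFY obeys the open transaction.
with pg_bus_conn() as conn:
    conn.notify('example_channel', '{"hello": "world"}')

# Long-lived listeners take a dedicated autocommit connection, so a close() of
# Django's request connection cannot drop the LISTEN subscription.
with pg_bus_conn(new_connection=True) as conn:
    conn.listen('example_channel')
    for event in conn.events(select_timeout=5, yield_timeouts=True):
        if event is not None:  # None means the select timed out
            print(event.payload)
```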
@@ -3,6 +3,7 @@ import uuid
 import json
 
 from django.conf import settings
+from django.db import connection
 import redis
 
 from awx.main.dispatch import get_local_queuename

@@ -13,7 +14,6 @@ logger = logging.getLogger('awx.main.dispatch')
 
 
 class Control(object):
-
     services = ('dispatcher', 'callback_receiver')
     result = None
 

@@ -37,18 +37,27 @@ class Control(object):
     def running(self, *args, **kwargs):
         return self.control_with_reply('running', *args, **kwargs)
 
+    def cancel(self, task_ids, *args, **kwargs):
+        return self.control_with_reply('cancel', *args, extra_data={'task_ids': task_ids}, **kwargs)
+
     @classmethod
     def generate_reply_queue_name(cls):
         return f"reply_to_{str(uuid.uuid4()).replace('-','_')}"
 
-    def control_with_reply(self, command, timeout=5):
+    def control_with_reply(self, command, timeout=5, extra_data=None):
         logger.warning('checking {} {} for {}'.format(self.service, command, self.queuename))
         reply_queue = Control.generate_reply_queue_name()
         self.result = None
 
+        if not connection.get_autocommit():
+            raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')
+
         with pg_bus_conn() as conn:
             conn.listen(reply_queue)
-            conn.notify(self.queuename, json.dumps({'control': command, 'reply_to': reply_queue}))
+            send_data = {'control': command, 'reply_to': reply_queue}
+            if extra_data:
+                send_data.update(extra_data)
+            conn.notify(self.queuename, json.dumps(send_data))
 
             for reply in conn.events(select_timeout=timeout, yield_timeouts=True):
                 if reply is None:
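The new cancel verb rides the existing control-with-reply plumbing. A hedged usage sketch; the UUID is illustrative:

```python
from awx.main.dispatch.control import Control

# Ask the local dispatcher to SIGTERM workers running these task UUIDs.
# The reply lists the UUIDs that were actually found and signaled.
canceled = Control('dispatcher').cancel(['6f1d4a5c-0000-0000-0000-000000000000'])
print(canceled)
```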
@@ -16,13 +16,14 @@ from queue import Full as QueueFull, Empty as QueueEmpty
 from django.conf import settings
 from django.db import connection as django_connection, connections
 from django.core.cache import cache as django_cache
+from django.utils.timezone import now as tz_now
 from django_guid import set_guid
 from jinja2 import Template
 import psutil
 
 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
-from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity
+from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity, log_excess_runtime
 
 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')

@@ -191,7 +192,6 @@ class PoolWorker(object):
 
 
 class StatefulPoolWorker(PoolWorker):
-
     track_managed_tasks = True
 
 

@@ -328,12 +328,16 @@ class AutoscalePool(WorkerPool):
             # Get same number as max forks based on memory, this function takes memory as bytes
             self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)
 
+        # add magic prime number of extra workers to ensure
+        # we have a few extra workers to run the heartbeat
+        self.max_workers += 7
+
         # max workers can't be less than min_workers
         self.max_workers = max(self.min_workers, self.max_workers)
 
-    def debug(self, *args, **kwargs):
-        self.cleanup()
-        return super(AutoscalePool, self).debug(*args, **kwargs)
+        # the task manager enforces settings.TASK_MANAGER_TIMEOUT on its own
+        # but if the task takes longer than the time defined here, we will force it to stop here
+        self.task_manager_timeout = settings.TASK_MANAGER_TIMEOUT + settings.TASK_MANAGER_TIMEOUT_GRACE_PERIOD
 
     @property
     def should_grow(self):

@@ -351,6 +355,7 @@ class AutoscalePool(WorkerPool):
     def debug_meta(self):
         return 'min={} max={}'.format(self.min_workers, self.max_workers)
 
+    @log_excess_runtime(logger)
     def cleanup(self):
         """
         Perform some internal account and cleanup. This is run on

@@ -359,8 +364,6 @@ class AutoscalePool(WorkerPool):
         1. Discover worker processes that exited, and recover messages they
            were handling.
         2. Clean up unnecessary, idle workers.
-        3. Check to see if the database says this node is running any tasks
-           that aren't actually running. If so, reap them.
 
         IMPORTANT: this function is one of the few places in the dispatcher
         (aside from setting lookups) where we talk to the database. As such,

@@ -383,6 +386,8 @@ class AutoscalePool(WorkerPool):
                         reaper.reap_job(j, 'failed')
                     except Exception:
                         logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
+                else:
+                    logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                 orphaned.extend(w.orphaned_tasks)
                 self.workers.remove(w)
             elif w.idle and len(self.workers) > self.min_workers:

@@ -401,13 +406,15 @@ class AutoscalePool(WorkerPool):
                 # the task manager to never do more work
                 current_task = w.current_task
                 if current_task and isinstance(current_task, dict):
-                    if current_task.get('task', '').endswith('tasks.run_task_manager'):
+                    endings = ['tasks.task_manager', 'tasks.dependency_manager', 'tasks.workflow_manager']
+                    current_task_name = current_task.get('task', '')
+                    if any(current_task_name.endswith(e) for e in endings):
                         if 'started' not in current_task:
                             w.managed_tasks[current_task['uuid']]['started'] = time.time()
                         age = time.time() - current_task['started']
                         w.managed_tasks[current_task['uuid']]['age'] = age
-                        if age > (60 * 5):
-                            logger.error(f'run_task_manager has held the advisory lock for >5m, sending SIGTERM to {w.pid}')  # noqa
+                        if age > self.task_manager_timeout:
+                            logger.error(f'{current_task_name} has held the advisory lock for {age}, sending SIGTERM to {w.pid}')
                             os.kill(w.pid, signal.SIGTERM)
 
         for m in orphaned:

@@ -417,13 +424,17 @@ class AutoscalePool(WorkerPool):
             idx = random.choice(range(len(self.workers)))
             self.write(idx, m)
 
-        # if the database says a job is running on this node, but it's *not*,
-        # then reap it
-        running_uuids = []
-        for worker in self.workers:
-            worker.calculate_managed_tasks()
-            running_uuids.extend(list(worker.managed_tasks.keys()))
-        reaper.reap(excluded_uuids=running_uuids)
+    def add_bind_kwargs(self, body):
+        bind_kwargs = body.pop('bind_kwargs', [])
+        body.setdefault('kwargs', {})
+        if 'dispatch_time' in bind_kwargs:
+            body['kwargs']['dispatch_time'] = tz_now().isoformat()
+        if 'worker_tasks' in bind_kwargs:
+            worker_tasks = {}
+            for worker in self.workers:
+                worker.calculate_managed_tasks()
+                worker_tasks[worker.pid] = list(worker.managed_tasks.keys())
+            body['kwargs']['worker_tasks'] = worker_tasks
 
     def up(self):
         if self.full:

@@ -438,9 +449,8 @@ class AutoscalePool(WorkerPool):
         if 'guid' in body:
             set_guid(body['guid'])
         try:
-            # when the cluster heartbeat occurs, clean up internally
-            if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
-                self.cleanup()
+            if isinstance(body, dict) and body.get('bind_kwargs'):
+                self.add_bind_kwargs(body)
             if self.should_grow:
                 self.up()
             # we don't care about "preferred queue" round robin distribution, just

@@ -452,6 +462,10 @@ class AutoscalePool(WorkerPool):
                     w.put(body)
                     break
             else:
+                task_name = 'unknown'
+                if isinstance(body, dict):
+                    task_name = body.get('task')
+                logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
                 return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
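@log_excess_runtime(logger), imported above from awx.main.utils.common, wraps a function and logs when a call runs long. A simplified stand-in with the same shape, noting the real helper may accept extra options:

```python
import functools
import time

def log_excess_runtime(logger, cutoff=5.0):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            start = time.time()
            try:
                return fn(*args, **kwargs)
            finally:
                delta = time.time() - start
                if delta > cutoff:  # only log calls that exceeded the budget
                    logger.warning('Running %s took %.2fs', fn.__name__, delta)
        return wrapper
    return decorator
```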
@@ -1,13 +1,13 @@
 import inspect
 import logging
-import sys
 import json
+import time
 from uuid import uuid4
 
-from django.conf import settings
 from django_guid import get_guid
 
 from . import pg_bus_conn
+from awx.main.utils import is_testing
 
 logger = logging.getLogger('awx.main.dispatch')
 

@@ -49,16 +49,23 @@ class task:
         @task(queue='tower_broadcast')
         def announce():
             print("Run this everywhere!")
+
+        # The special parameter bind_kwargs tells the main dispatcher process to add certain kwargs
+
+        @task(bind_kwargs=['dispatch_time'])
+        def print_time(dispatch_time=None):
+            print(f"Time I was dispatched: {dispatch_time}")
     """
 
-    def __init__(self, queue=None):
+    def __init__(self, queue=None, bind_kwargs=None):
         self.queue = queue
+        self.bind_kwargs = bind_kwargs
 
     def __call__(self, fn=None):
         queue = self.queue
+        bind_kwargs = self.bind_kwargs
 
         class PublisherMixin(object):
 
             queue = None
 
             @classmethod

@@ -75,14 +82,16 @@ class task:
                 msg = f'{cls.name}: Queue value required and may not be None'
                 logger.error(msg)
                 raise ValueError(msg)
-            obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name}
+            obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name, 'time_pub': time.time()}
             guid = get_guid()
             if guid:
                 obj['guid'] = guid
+            if bind_kwargs:
+                obj['bind_kwargs'] = bind_kwargs
             obj.update(**kw)
             if callable(queue):
                 queue = queue()
-            if not settings.IS_TESTING(sys.argv):
+            if not is_testing():
                 with pg_bus_conn() as conn:
                     conn.notify(queue, json.dumps(obj))
             return (obj, queue)
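Putting the bind_kwargs pieces together: the decorator stores the requested names, apply_async() copies them into the published message, and the dispatcher's add_bind_kwargs() fills in the values before handing the body to a worker. A hedged end-to-end sketch:

```python
from awx.main.dispatch.publish import task

@task(queue='tower_broadcast', bind_kwargs=['dispatch_time'])
def print_time(dispatch_time=None):
    print(f"Time I was dispatched: {dispatch_time}")

# The published message carries {'bind_kwargs': ['dispatch_time'], ...}; the
# receiving dispatcher injects kwargs['dispatch_time'] via add_bind_kwargs().
print_time.apply_async()
```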
@@ -2,6 +2,7 @@ from datetime import timedelta
 import logging
 
 from django.db.models import Q
+from django.conf import settings
 from django.utils.timezone import now as tz_now
 from django.contrib.contenttypes.models import ContentType
 

@@ -15,58 +16,73 @@ def startup_reaping():
     If this particular instance is starting, then we know that any running jobs are invalid
     so we will reap those jobs as a special action here
     """
-    me = Instance.objects.me()
-    jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
+    jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
     job_ids = []
     for j in jobs:
         job_ids.append(j.id)
-        j.status = 'failed'
-        j.start_args = ''
-        j.job_explanation += 'Task was marked as running at system start up. The system must have not shut down properly, so it has been marked as failed.'
-        j.save(update_fields=['status', 'start_args', 'job_explanation'])
-        if hasattr(j, 'send_notification_templates'):
-            j.send_notification_templates('failed')
-        j.websocket_emit_status('failed')
+        reap_job(
+            j,
+            'failed',
+            job_explanation='Task was marked as running at system start up. The system must have not shut down properly, so it has been marked as failed.',
+        )
     if job_ids:
         logger.error(f'Unified jobs {job_ids} were reaped on dispatch startup')
 
 
-def reap_job(j, status):
-    if UnifiedJob.objects.get(id=j.id).status not in ('running', 'waiting'):
+def reap_job(j, status, job_explanation=None):
+    j.refresh_from_db(fields=['status', 'job_explanation'])
+    status_before = j.status
+    if status_before not in ('running', 'waiting'):
         # just in case, don't reap jobs that aren't running
         return
     j.status = status
     j.start_args = ''  # blank field to remove encrypted passwords
-    j.job_explanation += ' '.join(
-        (
-            'Task was marked as running but was not present in',
-            'the job queue, so it has been marked as failed.',
-        )
-    )
+    if j.job_explanation:
+        j.job_explanation += ' '  # Separate messages for readability
+    if job_explanation is None:
+        j.job_explanation += 'Task was marked as running but was not present in the job queue, so it has been marked as failed.'
+    else:
+        j.job_explanation += job_explanation
     j.save(update_fields=['status', 'start_args', 'job_explanation'])
     if hasattr(j, 'send_notification_templates'):
         j.send_notification_templates('failed')
     j.websocket_emit_status(status)
-    logger.error('{} is no longer running; reaping'.format(j.log_format))
+    logger.error(f'{j.log_format} is no longer {status_before}; reaping')
 
 
-def reap(instance=None, status='failed', excluded_uuids=[]):
+def reap_waiting(instance=None, status='failed', job_explanation=None, grace_period=None, excluded_uuids=None, ref_time=None):
     """
-    Reap all jobs in waiting|running for this instance.
+    Reap all jobs in waiting for this instance.
     """
-    me = instance
-    if me is None:
-        try:
-            me = Instance.objects.me()
-        except RuntimeError as e:
-            logger.warning(f'Local instance is not registered, not running reaper: {e}')
-            return
-    now = tz_now()
+    if grace_period is None:
+        grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT
+
+    if instance is None:
+        hostname = Instance.objects.my_hostname()
+    else:
+        hostname = instance.hostname
+    if ref_time is None:
+        ref_time = tz_now()
+    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
+    if excluded_uuids:
+        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
+    for j in jobs:
+        reap_job(j, status, job_explanation=job_explanation)
+
+
+def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None):
+    """
+    Reap all jobs in running for this instance.
+    """
+    if instance is None:
+        hostname = Instance.objects.my_hostname()
+    else:
+        hostname = instance.hostname
     workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
     jobs = UnifiedJob.objects.filter(
-        (Q(status='running') | Q(status='waiting', modified__lte=now - timedelta(seconds=60)))
-        & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname))
-        & ~Q(polymorphic_ctype_id=workflow_ctype_id)
-    ).exclude(celery_task_id__in=excluded_uuids)
+        Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
+    )
+    if excluded_uuids:
+        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
     for j in jobs:
-        reap_job(j, status)
+        reap_job(j, status, job_explanation=job_explanation)
|
|||||||
@@ -17,6 +17,7 @@ from django.conf import settings
|
|||||||
|
|
||||||
from awx.main.dispatch.pool import WorkerPool
|
from awx.main.dispatch.pool import WorkerPool
|
||||||
from awx.main.dispatch import pg_bus_conn
|
from awx.main.dispatch import pg_bus_conn
|
||||||
|
from awx.main.utils.common import log_excess_runtime
|
||||||
|
|
||||||
if 'run_callback_receiver' in sys.argv:
|
if 'run_callback_receiver' in sys.argv:
|
||||||
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
|
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
|
||||||
@@ -39,7 +40,6 @@ class WorkerSignalHandler:
|
|||||||
|
|
||||||
|
|
||||||
class AWXConsumerBase(object):
|
class AWXConsumerBase(object):
|
||||||
|
|
||||||
last_stats = time.time()
|
last_stats = time.time()
|
||||||
|
|
||||||
def __init__(self, name, worker, queues=[], pool=None):
|
def __init__(self, name, worker, queues=[], pool=None):
|
||||||
@@ -62,7 +62,7 @@ class AWXConsumerBase(object):
|
|||||||
def control(self, body):
|
def control(self, body):
|
||||||
logger.warning(f'Received control signal:\n{body}')
|
logger.warning(f'Received control signal:\n{body}')
|
||||||
control = body.get('control')
|
control = body.get('control')
|
||||||
if control in ('status', 'running'):
|
if control in ('status', 'running', 'cancel'):
|
||||||
reply_queue = body['reply_to']
|
reply_queue = body['reply_to']
|
||||||
if control == 'status':
|
if control == 'status':
|
||||||
msg = '\n'.join([self.listening_on, self.pool.debug()])
|
msg = '\n'.join([self.listening_on, self.pool.debug()])
|
||||||
@@ -71,6 +71,17 @@ class AWXConsumerBase(object):
             for worker in self.pool.workers:
                 worker.calculate_managed_tasks()
                 msg.extend(worker.managed_tasks.keys())
+            elif control == 'cancel':
+                msg = []
+                task_ids = set(body['task_ids'])
+                for worker in self.pool.workers:
+                    task = worker.current_task
+                    if task and task['uuid'] in task_ids:
+                        logger.warn(f'Sending SIGTERM to task id={task["uuid"]}, task={task.get("task")}, args={task.get("args")}')
+                        os.kill(worker.pid, signal.SIGTERM)
+                        msg.append(task['uuid'])
+                if task_ids and not msg:
+                    logger.info(f'Could not locate running tasks to cancel with ids={task_ids}')
 
             with pg_bus_conn() as conn:
                 conn.notify(reply_queue, json.dumps(msg))
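The hunk above is the consumer side of the new 'cancel' control message. A hedged sketch of what a matching sender might publish; the payload keys ('control', 'task_ids', 'reply_to') mirror what control() reads, but the builder below is a hypothetical illustration, not the real awx.main.dispatch.control API:

# Hypothetical sender-side sketch; the actual publish/reply plumbing is omitted.
import json
import uuid

def build_cancel_message(task_uuids):
    reply_queue = 'reply_to_' + uuid.uuid4().hex  # consumer notifies results here
    message = {
        'control': 'cancel',
        'task_ids': list(task_uuids),  # consumer does set(body['task_ids'])
        'reply_to': reply_queue,
    }
    return reply_queue, json.dumps(message)

reply_queue, payload = build_cancel_message(['aaaa1111', 'bbbb2222'])
print(payload)  # what would be pg_notify'd to the dispatcher's queue

The consumer replies on reply_queue with the uuids it actually SIGTERMed, so an empty list in the reply means no matching task was found on any worker.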
@@ -81,6 +92,9 @@ class AWXConsumerBase(object):
         logger.error('unrecognized control message: {}'.format(control))
 
     def process_task(self, body):
+        if isinstance(body, dict):
+            body['time_ack'] = time.time()
+
         if 'control' in body:
             try:
                 return self.control(body)
@@ -99,8 +113,8 @@ class AWXConsumerBase(object):
             queue = 0
         self.pool.write(queue, body)
         self.total_messages += 1
-        self.record_statistics()
 
+    @log_excess_runtime(logger)
     def record_statistics(self):
         if time.time() - self.last_stats > 1:  # buffer stat recording to once per second
             try:
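record_statistics() is now decorated with log_excess_runtime rather than being called unconditionally from process_task(). A minimal sketch of a decorator in that spirit, assuming it logs only when the wrapped call runs longer than some threshold (the real implementation lives in awx.main.utils.common and may differ in detail):

# Assumed behavior: warn when the wrapped function exceeds a runtime threshold.
import functools
import logging
import time

def log_excess_runtime(logger, threshold=2.0):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            start = time.time()
            try:
                return func(*args, **kwargs)
            finally:
                elapsed = time.time() - start
                if elapsed > threshold:
                    logger.warning(f'{func.__name__} took {elapsed:.2f}s')
        return wrapper
    return decorator

logger = logging.getLogger(__name__)

@log_excess_runtime(logger, threshold=0.001)
def record_statistics():
    time.sleep(0.01)  # stand-in for stat collection

record_statistics()  # logs a warning because 0.01s exceeds the 0.001s threshold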
@@ -140,6 +154,16 @@ class AWXConsumerPG(AWXConsumerBase):
         # if no successful loops have ran since startup, then we should fail right away
         self.pg_is_down = True  # set so that we fail if we get database errors on startup
         self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
+        self.last_cleanup = time.time()
+
+    def run_periodic_tasks(self):
+        self.record_statistics()  # maintains time buffer in method
+
+        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
+            # NOTE: if we run out of database connections, it is important to still run cleanup
+            # so that we scale down workers and free up connections
+            self.pool.cleanup()
+            self.last_cleanup = time.time()
 
     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)
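The new run_periodic_tasks() uses a "cheap periodic work inside a hot loop" gate: it is invoked on every loop pass but only does real cleanup once the interval elapses. A standalone sketch of the pattern:

# Minimal sketch of the interval gate used above.
import time

class PeriodicRunner:
    def __init__(self, interval=60):
        self.interval = interval
        self.last_cleanup = time.time()

    def run_periodic_tasks(self, cleanup):
        # Called on every loop iteration (including idle timeouts); only does
        # real work when the interval has elapsed.
        if time.time() - self.last_cleanup > self.interval:
            cleanup()
            self.last_cleanup = time.time()

runner = PeriodicRunner(interval=0.1)
time.sleep(0.2)
runner.run_periodic_tasks(lambda: print('cleaned up'))  # interval elapsed, runs
runner.run_periodic_tasks(lambda: print('cleaned up'))  # too soon, skipped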
@@ -149,14 +173,16 @@ class AWXConsumerPG(AWXConsumerBase):
 
         while True:
             try:
-                with pg_bus_conn() as conn:
+                with pg_bus_conn(new_connection=True) as conn:
                     for queue in self.queues:
                         conn.listen(queue)
                     if init is False:
                         self.worker.on_start()
                         init = True
-                    for e in conn.events():
-                        self.process_task(json.loads(e.payload))
+                    for e in conn.events(yield_timeouts=True):
+                        if e is not None:
+                            self.process_task(json.loads(e.payload))
+                        self.run_periodic_tasks()
                 self.pg_is_down = False
                 if self.should_stop:
                     return
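The yield_timeouts=True change matters because a fully idle queue would otherwise never return from conn.events(), starving run_periodic_tasks(). A sketch of the generator shape this implies, with select() standing in for the real LISTEN/NOTIFY plumbing (an assumption about pg_bus_conn, not its actual implementation):

# Assumed shape only; the real events() lives in awx.main.dispatch.
import select

def events(conn, select_timeout=5, yield_timeouts=False):
    while True:
        if select.select([conn], [], [], select_timeout) == ([], [], []):
            if yield_timeouts:
                yield None  # wake the caller so it can run periodic work
        else:
            conn.poll()  # psycopg2 idiom: drain pending NOTIFY payloads
            while conn.notifies:
                yield conn.notifies.pop()

The caller in the diff then treats None as "nothing to process, but still run run_periodic_tasks()".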
@@ -213,6 +239,8 @@ class BaseWorker(object):
                     # so we can establish a new connection
                     conn.close_if_unusable_or_obsolete()
                 self.perform_work(body, *args)
+            except Exception:
+                logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
             finally:
                 if 'uuid' in body:
                     uuid = body['uuid']
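The broad except around perform_work() above keeps one poisoned message from killing the whole worker process. A minimal sketch of that loop shape, with the stdlib queue module standing in for the real worker queue:

# Sketch: the worker logs and keeps consuming instead of dying on a bad message.
import logging
import os
import queue

logger = logging.getLogger(__name__)

def work_loop(q, perform_work):
    while True:
        body = q.get()
        if body == 'QUIT':
            break
        try:
            perform_work(body)
        except Exception:
            # log and move on to the next message
            logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')

q = queue.Queue()
q.put({'task': 'boom'})
q.put('QUIT')
work_loop(q, lambda body: 1 / 0)  # logs the ZeroDivisionError, then exits cleanly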
awx/main/dispatch/worker/callback.py
@@ -3,14 +3,12 @@ import logging
 import os
 import signal
 import time
-import traceback
 import datetime
 
 from django.conf import settings
 from django.utils.functional import cached_property
 from django.utils.timezone import now as tz_now
-from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
-from django.db.utils import InterfaceError, InternalError
+from django.db import transaction, connection as django_connection
 from django_guid import set_guid
 
 import psutil
@@ -64,6 +62,7 @@ class CallbackBrokerWorker(BaseWorker):
     """
 
     MAX_RETRIES = 2
+    INDIVIDUAL_EVENT_RETRIES = 3
     last_stats = time.time()
     last_flush = time.time()
     total = 0
@@ -155,6 +154,8 @@ class CallbackBrokerWorker(BaseWorker):
         metrics_events_missing_created = 0
         metrics_total_job_event_processing_seconds = datetime.timedelta(seconds=0)
         for cls, events in self.buff.items():
+            if not events:
+                continue
             logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
             for e in events:
                 e.modified = now  # this can be set before created because now is set above on line 149
@@ -164,28 +165,48 @@ class CallbackBrokerWorker(BaseWorker):
                 else:  # only calculate the seconds if the created time already has been set
                     metrics_total_job_event_processing_seconds += e.modified - e.created
             metrics_duration_to_save = time.perf_counter()
+            saved_events = []
             try:
                 cls.objects.bulk_create(events)
                 metrics_bulk_events_saved += len(events)
-            except Exception:
+                saved_events = events
+                self.buff[cls] = []
+            except Exception as exc:
+                # If the database is flaking, let ensure_connection throw a general exception
+                # will be caught by the outer loop, which goes into a proper sleep and retry loop
+                django_connection.ensure_connection()
+                logger.warning(f'Error in events bulk_create, will try indiviually, error: {str(exc)}')
                 # if an exception occurs, we should re-attempt to save the
                 # events one-by-one, because something in the list is
                 # broken/stale
                 metrics_events_batch_save_errors += 1
-                for e in events:
+                for e in events.copy():
                     try:
                         e.save()
                         metrics_singular_events_saved += 1
-                    except Exception:
-                        logger.exception('Database Error Saving Job Event')
+                        events.remove(e)
+                        saved_events.append(e)  # Importantly, remove successfully saved events from the buffer
+                    except Exception as exc_indv:
+                        retry_count = getattr(e, '_retry_count', 0) + 1
+                        e._retry_count = retry_count
+
+                        # special sanitization logic for postgres treatment of NUL 0x00 char
+                        if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
+                            e.stdout = e.stdout.replace("\x00", "")
+
+                        if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
+                            logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
+                            events.remove(e)
+                        else:
+                            logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}')
+
             metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
-            for e in events:
+            for e in saved_events:
                 if not getattr(e, '_skip_websocket_message', False):
                     metrics_events_broadcast += 1
                     emit_event_detail(e)
                 if getattr(e, '_notification_trigger_event', False):
                     job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
-        self.buff = {}
         self.last_flush = time.time()
         # only update metrics if we saved events
         if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
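Two details in the hunk above are worth calling out: successfully saved events are moved into saved_events so only they get broadcast, and a ValueError on the first retry triggers NUL-byte sanitization, because PostgreSQL text columns reject 0x00. A toy illustration of the retry bookkeeping, with a hypothetical stand-in event class:

# Toy stand-in whose save() behaves like a postgres text column:
# it rejects NUL bytes with a ValueError.
class Event:
    def __init__(self, stdout):
        self.stdout = stdout

    def save(self):
        if '\x00' in self.stdout:
            raise ValueError('NUL character not allowed')

INDIVIDUAL_EVENT_RETRIES = 3

e = Event('output\x00with NUL')
for attempt in range(INDIVIDUAL_EVENT_RETRIES):
    try:
        e.save()
        print('saved on attempt', attempt + 1)  # succeeds on attempt 2
        break
    except ValueError:
        retry_count = getattr(e, '_retry_count', 0) + 1
        e._retry_count = retry_count
        if retry_count == 1 and '\x00' in e.stdout:
            e.stdout = e.stdout.replace('\x00', '')  # sanitize once, then retry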
@@ -257,19 +278,16 @@ class CallbackBrokerWorker(BaseWorker):
                 try:
                     self.flush(force=flush)
                     break
-                except (OperationalError, InterfaceError, InternalError):
+                except Exception as exc:
+                    # Aside form bugs, exceptions here are assumed to be due to database flake
                     if retries >= self.MAX_RETRIES:
                         logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
+                        self.buff = {}
                         return
                     delay = 60 * retries
-                    logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(i=retries + 1, delay=delay))
+                    logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}')
                     django_connection.close()
                     time.sleep(delay)
                     retries += 1
-                except DatabaseError:
-                    logger.exception('Database Error Saving Job Event')
-                    break
-        except Exception as exc:
-            tb = traceback.format_exc()
-            logger.error('Callback Task Processor Raised Exception: %r', exc)
-            logger.error('Detail: {}'.format(tb))
+        except Exception:
+            logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}')
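A worked example of the retry schedule implied by delay = 60 * retries with MAX_RETRIES = 2: the first re-attempt happens immediately, the second after a minute, and a third failure drops the buffered events:

# Pretend flush() always fails, to show the schedule; no sleeping here.
MAX_RETRIES = 2
retries = 0
while True:
    if retries >= MAX_RETRIES:
        print('giving up, dropping buffered events')
        break
    delay = 60 * retries
    print(f'retry #{retries + 1} in {delay} seconds')
    retries += 1
# retry #1 in 0 seconds
# retry #2 in 60 seconds
# giving up, dropping buffered events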
awx/main/dispatch/worker/task.py
@@ -3,6 +3,7 @@ import logging
 import importlib
 import sys
 import traceback
+import time
 
 from kubernetes.config import kube_config
 
@@ -60,8 +61,19 @@ class TaskWorker(BaseWorker):
             # the callable is a class, e.g., RunJob; instantiate and
             # return its `run()` method
             _call = _call().run
+
+        log_extra = ''
+        logger_method = logger.debug
+        if ('time_ack' in body) and ('time_pub' in body):
+            time_publish = body['time_ack'] - body['time_pub']
+            time_waiting = time.time() - body['time_ack']
+            if time_waiting > 5.0 or time_publish > 5.0:
+                # If task too a very long time to process, add this information to the log
+                log_extra = f' took {time_publish:.4f} to ack, {time_waiting:.4f} in local dispatcher'
+                logger_method = logger.info
         # don't print kwargs, they often contain launch-time secrets
-        logger.debug('task {} starting {}(*{})'.format(uuid, task, args))
+        logger_method(f'task {uuid} starting {task}(*{args}){log_extra}')
 
         return _call(*args, **kwargs)
 
     def perform_work(self, body):
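The timing added above distinguishes publish latency (time_pub to time_ack, i.e. until the dispatcher acked the message; see the process_task change earlier) from local queue wait (time_ack until a pool worker picks the task up). Computed on a toy message:

# Toy trace of the two latencies logged above.
import time

body = {'time_pub': time.time()}
time.sleep(0.05)              # in flight / sitting in pg_notify
body['time_ack'] = time.time()
time.sleep(0.05)              # waiting for a free pool worker
time_publish = body['time_ack'] - body['time_pub']
time_waiting = time.time() - body['time_ack']
print(f'took {time_publish:.4f} to ack, {time_waiting:.4f} in local dispatcher')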
awx/main/fields.py
@@ -232,7 +232,6 @@ class ImplicitRoleField(models.ForeignKey):
             field_names = [field_names]
 
         for field_name in field_names:
-
             if field_name.startswith('singleton:'):
                 continue
 
@@ -244,7 +243,6 @@ class ImplicitRoleField(models.ForeignKey):
             field = getattr(cls, field_name, None)
 
             if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:
-
                 if '.' in field_attr:
                     raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
 
@@ -629,7 +627,6 @@ class CredentialInputField(JSONSchemaField):
         # `ssh_key_unlock` requirements are very specific and can't be
         # represented without complicated JSON schema
         if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:
-
             # in order to properly test the necessity of `ssh_key_unlock`, we
             # need to know the real value of `ssh_key_data`; for a payload like:
             # {
@@ -791,7 +788,8 @@ class CredentialTypeInjectorField(JSONSchemaField):
                 'type': 'object',
                 'patternProperties': {
                     # http://docs.ansible.com/ansible/playbooks_variables.html#what-makes-a-valid-variable-name
-                    '^[a-zA-Z_]+[a-zA-Z0-9_]*$': {'type': 'string'},
+                    # plus, add ability to template
+                    r'^[a-zA-Z_\{\}]+[a-zA-Z0-9_\{\}]*$': {"anyOf": [{'type': 'string'}, {'type': 'array'}, {'$ref': '#/properties/extra_vars'}]}
                 },
                 'additionalProperties': False,
             },
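The widened patternProperties regex admits '{' and '}' so injector variable names can themselves be templated, and the value schema now accepts strings, arrays, or nested extra_vars. A quick check of what the new name pattern accepts versus the old one:

# Compare the old and new key patterns from the hunk above.
import re

old = re.compile(r'^[a-zA-Z_]+[a-zA-Z0-9_]*$')
new = re.compile(r'^[a-zA-Z_\{\}]+[a-zA-Z0-9_\{\}]*$')

for name in ('my_var', '{{my_var}}', '9bad'):
    print(name, bool(old.match(name)), bool(new.match(name)))
# my_var matches both; the templated name only matches the new pattern;
# a name starting with a digit matches neither.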
@@ -858,27 +856,44 @@ class CredentialTypeInjectorField(JSONSchemaField):
                 template_name = template_name.split('.')[1]
                 setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE_FILENAME')
 
+        def validate_template_string(type_, key, tmpl):
+            try:
+                sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
+            except UndefinedError as e:
+                raise django_exceptions.ValidationError(
+                    _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
+                    code='invalid',
+                    params={'value': value},
+                )
+            except SecurityError as e:
+                raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
+            except TemplateSyntaxError as e:
+                raise django_exceptions.ValidationError(
+                    _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
+                    code='invalid',
+                    params={'value': value},
+                )
+
+        def validate_extra_vars(key, node):
+            if isinstance(node, dict):
+                for k, v in node.items():
+                    validate_template_string("extra_vars", 'a key' if key is None else key, k)
+                    validate_extra_vars(k if key is None else "{key}.{k}".format(key=key, k=k), v)
+            elif isinstance(node, list):
+                for i, x in enumerate(node):
+                    validate_extra_vars("{key}[{i}]".format(key=key, i=i), x)
+            else:
+                validate_template_string("extra_vars", key, node)
+
         for type_, injector in value.items():
             if type_ == 'env':
                 for key in injector.keys():
                     self.validate_env_var_allowed(key)
-            for key, tmpl in injector.items():
-                try:
-                    sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
-                except UndefinedError as e:
-                    raise django_exceptions.ValidationError(
-                        _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
-                        code='invalid',
-                        params={'value': value},
-                    )
-                except SecurityError as e:
-                    raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
-                except TemplateSyntaxError as e:
-                    raise django_exceptions.ValidationError(
-                        _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
-                        code='invalid',
-                        params={'value': value},
-                    )
+            if type_ == 'extra_vars':
+                validate_extra_vars(None, injector)
+            else:
+                for key, tmpl in injector.items():
+                    validate_template_string(type_, key, tmpl)
 
 
 class AskForField(models.BooleanField):
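To see how the recursive walk in validate_extra_vars() names nested keys, here is a trace on a toy structure with the template validator stubbed out to print instead of render (a stub only; the real validator renders in the Jinja sandbox as shown above):

# Stubbed validator: print what would be rendered instead of rendering it.
def validate_template_string(type_, key, tmpl):
    print(f'validate {type_}: {key!r} -> {tmpl!r}')

def validate_extra_vars(key, node):
    if isinstance(node, dict):
        for k, v in node.items():
            validate_template_string("extra_vars", 'a key' if key is None else key, k)
            validate_extra_vars(k if key is None else "{key}.{k}".format(key=key, k=k), v)
    elif isinstance(node, list):
        for i, x in enumerate(node):
            validate_extra_vars("{key}[{i}]".format(key=key, i=i), x)
    else:
        validate_template_string("extra_vars", key, node)

validate_extra_vars(None, {'outer': {'inner': ['{{ var }}']}})
# validate extra_vars: 'a key' -> 'outer'
# validate extra_vars: 'outer' -> 'inner'
# validate extra_vars: 'outer.inner[0]' -> '{{ var }}'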
Some files were not shown because too many files have changed in this diff