Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 11:34:43 -03:30)
Compare commits
1052 Commits
@@ -1,3 +1,3 @@
 awx/ui/node_modules
-awx/ui_next/node_modules
 Dockerfile
+.git
1 .github/CODEOWNERS vendored Normal file
@@ -0,0 +1 @@
+workflows/e2e_test.yml @tiagodread @shanemcd @jakemcdermott
2 .github/ISSUE_TEMPLATE.md vendored
@@ -16,7 +16,7 @@ https://www.ansible.com/security
 <!-- Pick the area of AWX for this issue, you can have multiple, delete the rest: -->
 - API
 - UI
-- Installer
+- Collection
 
 ##### SUMMARY
 <!-- Briefly describe the problem. -->
41 .github/ISSUE_TEMPLATE/bug_report.md vendored
@@ -1,41 +0,0 @@
----
-name: "\U0001F41B Bug report"
-about: Create a report to help us improve
-
----
-<!-- Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support, please use:
-
-- http://web.libera.chat/?channels=#ansible-awx
-- https://groups.google.com/forum/#!forum/awx-project
-
-We have to limit this because of limited volunteer time to respond to issues! -->
-
-##### ISSUE TYPE
-- Bug Report
-
-##### SUMMARY
-<!-- Briefly describe the problem. -->
-
-##### ENVIRONMENT
-* AWX version: X.Y.Z
-* AWX install method: openshift, minishift, docker on linux, docker for mac, boot2docker
-* Ansible version: X.Y.Z
-* Operating System:
-* Web Browser:
-
-##### STEPS TO REPRODUCE
-
-<!-- Please describe exactly how to reproduce the problem. -->
-
-##### EXPECTED RESULTS
-
-<!-- What did you expect to happen when running the steps above? -->
-
-##### ACTUAL RESULTS
-
-<!-- What actually happened? -->
-
-##### ADDITIONAL INFORMATION
-
-<!-- Include any links to sosreport, database dumps, screenshots or other
-information. -->
142 .github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
@@ -0,0 +1,142 @@
+---
+name: Bug Report
+description: Create a report to help us improve
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Issues are for **concrete, actionable bugs and feature requests** only. For debugging help or technical support, please use:
+        - The #ansible-awx channel on irc.libera.chat
+        - The awx project mailing list, https://groups.google.com/forum/#!forum/awx-project
+
+  - type: checkboxes
+    id: terms
+    attributes:
+      label: Please confirm the following
+      options:
+        - label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
+          required: true
+        - label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
+          required: true
+        - label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
+          required: true
+
+  - type: textarea
+    id: summary
+    attributes:
+      label: Summary
+      description: Briefly describe the problem.
+    validations:
+      required: false
+
+  - type: input
+    id: awx-version
+    attributes:
+      label: AWX version
+      description: What version of AWX are you running?
+    validations:
+      required: true
+
+  - type: checkboxes
+    id: components
+    attributes:
+      label: Select the relevant components
+      options:
+        - label: UI
+        - label: API
+        - label: Docs
+
+  - type: dropdown
+    id: awx-install-method
+    attributes:
+      label: Installation method
+      description: How did you install AWX?
+      multiple: false
+      options:
+        - kubernetes
+        - minikube
+        - openshift
+        - minishift
+        - docker on linux
+        - docker for mac
+        - boot2docker
+    validations:
+      required: true
+
+  - type: dropdown
+    id: modified-architecture
+    attributes:
+      label: Modifications
+      description: >-
+        Have you modified the installation, deployment topology, or container images in any way? If yes, please
+        explain in the "additional information" field at the bottom of the form.
+      multiple: false
+      options:
+        - "no"
+        - "yes"
+    validations:
+      required: true
+
+  - type: input
+    id: ansible-version
+    attributes:
+      label: Ansible version
+      description: What version of Ansible are you running?
+    validations:
+      required: false
+
+  - type: input
+    id: operating-system
+    attributes:
+      label: Operating system
+      description: What operating system are you using?
+    validations:
+      required: false
+
+  - type: dropdown
+    id: browsers
+    attributes:
+      label: Web browser
+      description: Which browsers are affected?
+      multiple: true
+      options:
+        - Firefox
+        - Chrome
+        - Safari
+        - Edge
+    validations:
+      required: false
+
+  - type: textarea
+    id: steps-to-reproduce
+    attributes:
+      label: Steps to reproduce
+      description: >-
+        Starting from a new installation of the system, describe exactly how a developer or quality engineer can reproduce the bug
+        on infrastructure that isn't yours. Include any and all resources created, input values, test users, roles assigned, playbooks used, etc.
+    validations:
+      required: true
+
+  - type: textarea
+    id: expected-results
+    attributes:
+      label: Expected results
+      description: What did you expect to happen when running the steps above?
+    validations:
+      required: true
+
+  - type: textarea
+    id: actual-results
+    attributes:
+      label: Actual results
+      description: What actually happened?
+    validations:
+      required: true
+
+  - type: textarea
+    id: additional-information
+    attributes:
+      label: Additional information
+      description: Include any relevant log output, links to sosreport, database dumps, screenshots, or other information.
+    validations:
+      required: false
9 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,3 +1,11 @@
+<!--- changelog-entry
+# Fill in 'msg' below to have an entry automatically added to the next release changelog.
+# Leaving 'msg' blank will not generate a changelog entry for this PR.
+# Please ensure this is a simple (and readable) one-line string.
+---
+msg: ""
+-->
+
 ##### SUMMARY
 <!--- Describe the change, including rationale and design decisions -->
 
@@ -17,6 +25,7 @@ the change does.
 <!--- Name of the module/plugin/module/task -->
 - API
 - UI
+- Collection
 
 ##### AWX VERSION
 <!--- Paste verbatim output from `make VERSION` between quotes below -->
12 .github/issue_labeler.yml vendored Normal file
@@ -0,0 +1,12 @@
+needs_triage:
+  - '.*'
+"type:bug":
+  - "Please confirm the following"
+"type:enhancement":
+  - "Feature Idea"
+"component:ui":
+  - "\\[X\\] UI"
+"component:api":
+  - "\\[X\\] API"
+"component:docs":
+  - "\\[X\\] Docs"
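A note on how this config is consumed: the `github/issue-labeler` action (wired up in `label_issue.yml` further down) treats each list entry as a regular expression matched against the issue body and applies the label on any match; the `'.*'` pattern under `needs_triage` therefore matches every issue. The bug-report form renders a checked component box as `[X] UI`, which is why the brackets are escaped above. A minimal sketch of one more rule under the same conventions (the `component:collection` label and its pattern are hypothetical, not part of this change):

```yaml
# Hypothetical extra rule for .github/issue_labeler.yml:
# apply "component:collection" when a checked "Collection" box appears in the issue body.
"component:collection":
  - "\\[X\\] Collection"
```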
14 .github/pr_labeler.yml vendored Normal file
@@ -0,0 +1,14 @@
+"component:api":
+  - any: ['awx/**/*', '!awx/ui/*']
+
+"component:ui":
+  - any: ['awx/ui/**/*']
+
+"component:docs":
+  - any: ['docs/**/*']
+
+"component:cli":
+  - any: ['awxkit/**/*']
+
+"component:collection":
+  - any: ['awx_collection/**/*']
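Unlike the issue labeler, `actions/labeler` (configured via `label_pr.yml` below) matches these entries as glob patterns against the files changed in a pull request; a leading `!` excludes paths, which appears to be how API-side changes are kept separate from UI-only changes above. A sketch of one more rule in the same style (the `component:deps` label and path are hypothetical):

```yaml
# Hypothetical extra rule for .github/pr_labeler.yml:
# label any PR that touches a file under requirements/.
"component:deps":
  - any: ['requirements/**/*']
```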
104 .github/workflows/ci.yml vendored Normal file
@@ -0,0 +1,104 @@
+---
+name: CI
+env:
+  BRANCH: ${{ github.base_ref || 'devel' }}
+on:
+  pull_request:
+jobs:
+  common_tests:
+    name: ${{ matrix.tests.name }}
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        tests:
+          - name: api-test
+            command: /start_tests.sh
+            label: Run API Tests
+          - name: api-lint
+            command: /var/lib/awx/venv/awx/bin/tox -e linters
+            label: Run API Linters
+          - name: api-swagger
+            command: /start_tests.sh swagger
+            label: Generate API Reference
+          - name: awx-collection
+            command: /start_tests.sh test_collection_all
+            label: Run Collection Tests
+          - name: api-schema
+            label: Check API Schema
+            command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
+          - name: ui-lint
+            label: Run UI Linters
+            command: make ui-lint
+          - name: ui-test
+            label: Run UI Tests
+            command: make ui-test
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: ${{ matrix.tests.label }}
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
+
+  awx-operator:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout awx
+        uses: actions/checkout@v2
+        with:
+          path: awx
+
+      - name: Checkout awx-operator
+        uses: actions/checkout@v2
+        with:
+          repository: ansible/awx-operator
+          path: awx-operator
+
+      - name: Install playbook dependencies
+        run: |
+          python3 -m pip install docker
+
+      - name: Build AWX image
+        working-directory: awx
+        run: |
+          ansible-playbook -v tools/ansible/build.yml \
+            -e headless=yes \
+            -e awx_image=awx \
+            -e awx_image_tag=ci \
+            -e ansible_python_interpreter=$(which python3)
+
+      - name: Run test deployment with awx-operator
+        working-directory: awx-operator
+        run: |
+          python3 -m pip install -r molecule/requirements.txt
+          ansible-galaxy collection install -r molecule/requirements.yml
+          sudo rm -f $(which kustomize)
+          make kustomize
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
+        env:
+          AWX_TEST_IMAGE: awx
+          AWX_TEST_VERSION: ci
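Because `common_tests` fans out purely over the `tests` matrix, each check only needs a `name`, a human-readable `label`, and the `command` to run inside the `awx_devel` container; the shared steps (checkout, registry login, image build) stay untouched. A sketch of what one more matrix entry would look like (the check name and the `start_tests.sh` target here are hypothetical, used only to show the shape):

```yaml
# Hypothetical additional entry under strategy.matrix.tests in ci.yml:
- name: api-migrations
  label: Check for missing migrations
  command: /start_tests.sh detect-missing-migrations  # assumed target, for illustration only
```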
38 .github/workflows/devel_image.yml vendored Normal file
@@ -0,0 +1,38 @@
+---
+name: Push Development Image
+on:
+  push:
+    branches:
+      - devel
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+
+      - name: Push image
+        run: |
+          docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
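The `${GITHUB_REF##*/}` spans above are not Actions expressions but POSIX shell parameter expansion, evaluated when the `run:` script executes: `##*/` strips the longest prefix ending in `/`, so `refs/heads/devel` becomes `devel` and the image tag follows the branch name. A standalone illustration of the expansion (the step name and example value are illustrative):

```yaml
- name: Demonstrate branch-name extraction (illustrative)
  run: |
    REF="refs/heads/devel"   # GitHub sets GITHUB_REF to a value shaped like this
    echo "${REF##*/}"        # '##*/' removes everything through the last '/': prints "devel"
```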
108 .github/workflows/e2e_test.yml vendored Normal file
@@ -0,0 +1,108 @@
+---
+name: E2E Tests
+on:
+  pull_request_target:
+    types: [labeled]
+jobs:
+  e2e-test:
+    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
+    runs-on: ubuntu-latest
+    timeout-minutes: 40
+    permissions:
+      packages: write
+      contents: read
+    strategy:
+      matrix:
+        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Install system deps
+        run: sudo apt-get install -y gettext
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+
+      - name: Build UI
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make ui-devel
+
+      - name: Start AWX
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make docker-compose &> make-docker-compose-output.log &
+
+      - name: Pull awx_cypress_base image
+        run: |
+          docker pull quay.io/awx/awx_cypress_base:latest
+
+      - name: Checkout test project
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ github.repository_owner }}/tower-qa
+          ssh-key: ${{ secrets.QA_REPO_KEY }}
+          path: tower-qa
+          ref: devel
+
+      - name: Build cypress
+        run: |
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          docker build -t awx-pf-tests .
+
+      - name: Update default AWX password
+        run: |
+          while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
+          do
+            echo "Waiting for AWX..."
+            sleep 5;
+          done
+          echo "AWX is up, updating the password..."
+          docker exec -i tools_awx_1 sh <<-EOSH
+            awx-manage update_password --username=admin --password=password
+          EOSH
+
+      - name: Run E2E tests
+        env:
+          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+        run: |
+          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
+          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
+          export COMMIT_INFO_SHA=$GITHUB_SHA
+          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
+          printenv > .env
+          echo "Executing tests:"
+          docker run \
+            --network '_sources_default' \
+            --ipc=host \
+            --env-file=.env \
+            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
+            -e CYPRESS_AWX_E2E_USERNAME=admin \
+            -e CYPRESS_AWX_E2E_PASSWORD='password' \
+            -e COMMAND="npm run cypress-concurrently-gha" \
+            -v /dev/shm:/dev/shm \
+            -v $PWD:/e2e \
+            -w /e2e \
+            awx-pf-tests run --project .
+
+      - name: Save AWX logs
+        uses: actions/upload-artifact@v2
+        with:
+          name: AWX-logs-${{ matrix.job }}
+          path: make-docker-compose-output.log
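The "Update default AWX password" step doubles as a readiness gate: because `make docker-compose` was backgrounded with `&`, the job polls `/api/v2/ping/` until it answers HTTP 200 before touching the admin account. The same pattern generalizes to any HTTP service; a minimal sketch (the `URL` value is just an example):

```yaml
- name: Wait for an HTTP endpoint to come up (illustrative pattern)
  env:
    URL: https://localhost:8043/api/v2/ping/   # example endpoint
  run: |
    # Poll until the endpoint answers 200; -k skips TLS verification for self-signed certs.
    until [ "$(curl -s -o /dev/null -w '%{http_code}' -k "$URL")" = "200" ]; do
      echo "Waiting for $URL ..."
      sleep 5
    done
```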
22 .github/workflows/label_issue.yml vendored Normal file
@@ -0,0 +1,22 @@
+name: Label Issue
+
+on:
+  issues:
+    types:
+      - opened
+      - reopened
+      - edited
+
+jobs:
+  triage:
+    runs-on: ubuntu-latest
+    name: Label Issue
+
+    steps:
+      - name: Label Issue
+        uses: github/issue-labeler@v2.4.1
+        with:
+          repo-token: "${{ secrets.GITHUB_TOKEN }}"
+          not-before: 2021-12-07T07:00:00Z
+          configuration-path: .github/issue_labeler.yml
+          enable-versioned-regex: 0
20 .github/workflows/label_pr.yml vendored Normal file
@@ -0,0 +1,20 @@
+name: Label PR
+
+on:
+  pull_request_target:
+    types:
+      - opened
+      - reopened
+      - synchronize
+
+jobs:
+  triage:
+    runs-on: ubuntu-latest
+    name: Label PR
+
+    steps:
+      - name: Label PR
+        uses: actions/labeler@v3
+        with:
+          repo-token: "${{ secrets.GITHUB_TOKEN }}"
+          configuration-path: .github/pr_labeler.yml
26 .github/workflows/promote.yml vendored Normal file
@@ -0,0 +1,26 @@
+---
+name: Promote Release
+on:
+  release:
+    types: [published]
+
+jobs:
+  promote:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+
+      - name: Re-tag and promote awx image
+        run: |
+          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
+          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository }}:latest
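Worth noting about the promote job: `docker tag` only creates a local alias for an image that has already been pulled; nothing moves between registries until each alias is pushed, which is why the versioned tag and `latest` are pushed separately. A generalized sketch of the pattern (the `SRC`/`DST` image names are placeholders):

```yaml
- name: Re-tag and push pattern (illustrative; image names are placeholders)
  run: |
    SRC=ghcr.io/example/app:1.2.3
    DST=quay.io/example/app
    docker pull "$SRC"               # fetch the staged image locally
    docker tag "$SRC" "$DST:1.2.3"   # tagging is a local alias, no registry traffic
    docker tag "$SRC" "$DST:latest"
    docker push "$DST:1.2.3"         # each tag must be pushed on its own
    docker push "$DST:latest"
```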
131 .github/workflows/stage.yml vendored Normal file
@@ -0,0 +1,131 @@
+---
+name: Stage Release
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'AWX version.'
+        required: true
+        default: ''
+      operator_version:
+        description: 'Operator version. Leave blank to skip staging awx-operator.'
+        default: ''
+      confirm:
+        description: 'Are you sure? Set this to yes.'
+        required: true
+        default: 'no'
+
+jobs:
+  stage:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: write
+    steps:
+      - name: Verify inputs
+        run: |
+          set -e
+
+          if [[ ${{ github.event.inputs.confirm }} != "yes" ]]; then
+            >&2 echo "Confirm must be 'yes'"
+            exit 1
+          fi
+
+          if [[ ${{ github.event.inputs.version }} == "" ]]; then
+            >&2 echo "Set version to continue."
+            exit 1
+          fi
+
+          exit 0
+
+      - name: Checkout awx
+        uses: actions/checkout@v2
+        with:
+          path: awx
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Checkout awx-logos
+        uses: actions/checkout@v2
+        with:
+          repository: ansible/awx-logos
+          path: awx-logos
+
+      - name: Checkout awx-operator
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
+
+      - name: Install playbook dependencies
+        run: |
+          python3 -m pip install docker
+
+      - name: Build and stage AWX
+        working-directory: awx
+        run: |
+          ansible-playbook -v tools/ansible/build.yml \
+            -e registry=ghcr.io \
+            -e registry_username=${{ github.actor }} \
+            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
+            -e awx_image=${{ github.repository }} \
+            -e awx_version=${{ github.event.inputs.version }} \
+            -e ansible_python_interpreter=$(which python3) \
+            -e push=yes \
+            -e awx_official=yes
+
+      - name: Build and stage awx-operator
+        working-directory: awx-operator
+        run: |
+          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }}" \
+          IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
+          VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
+
+      - name: Run test deployment with awx-operator
+        working-directory: awx-operator
+        run: |
+          python3 -m pip install -r molecule/requirements.txt
+          ansible-galaxy collection install -r molecule/requirements.yml
+          sudo rm -f $(which kustomize)
+          make kustomize
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
+        env:
+          AWX_TEST_IMAGE: ${{ github.repository }}
+          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+
+      - name: Generate changelog
+        uses: shanemcd/simple-changelog-generator@v1
+        id: changelog
+        with:
+          repo: "${{ github.repository }}"
+
+      - name: Write changelog to file
+        run: |
+          cat << 'EOF' > /tmp/awx-changelog
+          ${{ steps.changelog.outputs.changelog }}
+          EOF
+
+      - name: Create draft release for AWX
+        working-directory: awx
+        run: |
+          ansible-playbook -v tools/ansible/stage.yml \
+            -e changelog_path=/tmp/awx-changelog \
+            -e repo=${{ github.repository }} \
+            -e awx_image=ghcr.io/${{ github.repository }} \
+            -e version=${{ github.event.inputs.version }} \
+            -e github_token=${{ secrets.GITHUB_TOKEN }}
+
+      - name: Create draft release for awx-operator
+        if: ${{ github.event.inputs.operator_version != '' }}
+        working-directory: awx
+        run: |
+          ansible-playbook tools/ansible/stage.yml \
+            -e version=${{ github.event.inputs.operator_version }} \
+            -e repo=${{ github.repository_owner }}/awx-operator \
+            -e github_token=${{ secrets.AWX_OPERATOR_RELEASE_TOKEN }}
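One shell detail in the "Write changelog to file" step above: the heredoc delimiter is quoted (`<< 'EOF'`), so the shell performs no expansion on the body; the `${{ steps.changelog.outputs.changelog }}` substitution happens earlier, when Actions templates the script. That matters whenever a changelog could contain `$` or backticks. An illustration of the difference (values are made up):

```yaml
- name: Quoted vs. unquoted heredoc (illustrative)
  run: |
    GREETING=hello
    # Quoted delimiter: no expansion, prints the literal text: $GREETING
    cat << 'EOF'
    $GREETING
    EOF
    # Unquoted delimiter: expansion happens, prints: hello
    cat << EOF
    $GREETING
    EOF
```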
52 .github/workflows/upload_schema.yml vendored Normal file
@@ -0,0 +1,52 @@
+---
+name: Upload API Schema
+on:
+  push:
+    branches:
+      - devel
+      - release_4.1
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+
+      - name: Generate API Schema
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
+
+      - name: Upload API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"
19 .gitignore vendored
@@ -28,12 +28,12 @@ awx/ui/build_test
 awx/ui/client/languages
 awx/ui/templates/ui/index.html
 awx/ui/templates/ui/installing.html
-awx/ui_next/node_modules/
-awx/ui_next/src/locales/*/messages.js
-awx/ui_next/coverage/
-awx/ui_next/build
-awx/ui_next/.env.local
-awx/ui_next/instrumented
+awx/ui/node_modules/
+awx/ui/src/locales/*/messages.js
+awx/ui/coverage/
+awx/ui/build
+awx/ui/.env.local
+awx/ui/instrumented
 rsyslog.pid
 tools/prometheus/data
 tools/docker-compose/ansible/awx_dump.sql
@@ -41,6 +41,8 @@ tools/docker-compose/Dockerfile
 tools/docker-compose/_build
 tools/docker-compose/_sources
 tools/docker-compose/overrides/
+tools/docker-compose-minikube/_sources
+tools/docker-compose/keycloak.awx.realm.json
 
 # Tower setup playbook testing
 setup/test/roles/postgresql
@@ -57,19 +59,18 @@ __pycache__
 /dist
 /*.egg-info
 *.py[c,o]
+/.eggs
 
 # JavaScript
 /Gruntfile.js
 /Brocfile.js
 /bower.json
-/package.json
 /testem.yml
 **/coverage
 /.istanbul.yml
 **/node_modules/**
 /tmp
 **/npm-debug.log*
-**/package-lock.json
 
 # UI build flag files
 awx/ui/.deps_built
@@ -153,7 +154,7 @@ use_dev_supervisor.txt
 .idea/*
 *.unison.tmp
 *.#
-/awx/ui_next/.ui-built
+/awx/ui/.ui-built
 /Dockerfile
 /_build/
 /_build_kube_dev/
@@ -1,12 +1,16 @@
 ---
 ignore: |
+  .github
   .tox
   awx/main/tests/data/inventory/plugins/**
   # vault files
   awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
   awx/ui/test/e2e/tests/smoke-vars.yml
+  awx/ui/node_modules
+  tools/docker-compose/_sources
 
 extends: default
 
 rules:
   line-length: disable
+  truthy: disable
|||||||
500
CHANGELOG.md
500
CHANGELOG.md
@@ -1,501 +1,7 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
# 19.2.2 (June 28, 2021)
|
**Note:** This file is deprecated and will be removed at some point in a future release.
|
||||||
|
|
||||||
- Fixed bug where symlinks pointing to directories were not preserved (https://github.com/ansible/ansible-runner/pull/736)
|
Starting with AWX 20, release notes are published to [GitHub Releases](https://github.com/ansible/awx/releases).
|
||||||
- Various bugfixes found during testing (https://github.com/ansible/awx/pull/10532)
|
|
||||||
|
|
||||||
# 19.2.1 (June 17, 2021)
|
For older release notes, see https://github.com/ansible/awx/blob/19.3.0/CHANGELOG.md.
|
||||||
|
|
||||||
- There are now 2 default Instance Groups: 'controlplane' and 'default' (https://github.com/ansible/awx/pull/10324)
|
|
||||||
- Removed deprecated modules: `tower_send`, `tower_receive`, `tower_workflow_template` (https://github.com/ansible/awx/pull/9980)
|
|
||||||
- Improved UI performance when a large amount of events are being emitted by jobs (https://github.com/ansible/awx/pull/10053)
|
|
||||||
- Settings UI Revert All button now issues a DELETE instead of PATCHing all fields (https://github.com/ansible/awx/pull/10376)
|
|
||||||
- Fixed a bug with the schedule date/time picker in Firefox (https://github.com/ansible/awx/pull/10291)
|
|
||||||
- UI now preselects the system default Galaxy credential when creating a new organization (https://github.com/ansible/awx/pull/10395)
|
|
||||||
- Added favicon (https://github.com/ansible/awx/pull/10388)
|
|
||||||
- Removed `not` option from smart inventory host filter search as it's not supported by the API (https://github.com/ansible/awx/pull/10380)
|
|
||||||
- Added button to allow user to refetch project revision after project sync has finished (https://github.com/ansible/awx/pull/10334)
|
|
||||||
- Fixed bug where extraneous CONFIG requests were made on logout (https://github.com/ansible/awx/pull/10379)
|
|
||||||
- Fixed bug where users were unable to cancel inventory syncs (https://github.com/ansible/awx/pull/10346)
|
|
||||||
- Added missing dashboard graph filters (https://github.com/ansible/awx/pull/10349)
|
|
||||||
- Added support for typing in to single select lookup form fields (https://github.com/ansible/awx/pull/10257)
|
|
||||||
- Fixed various bugs related to user sessions (https://github.com/ansible/awx/pull/9908)
|
|
||||||
- Fixed bug where sorting in modals would close the modal (https://github.com/ansible/awx/pull/10215)
|
|
||||||
- Added support for Red Hat Insights as an inventory source (https://github.com/ansible/awx/pull/8650)
|
|
||||||
- Fixed bugs when selecting items in a list then sorting/paginating (https://github.com/ansible/awx/pull/10329)
|
|
||||||
|
|
||||||

# 19.2.0 (June 1, 2021)

- Fixed a race condition that would sometimes cause jobs to error out at the very end of an otherwise successful run (https://github.com/ansible/receptor/pull/328)
- Fixed a bug where users were unable to click on text next to checkboxes in modals (https://github.com/ansible/awx/pull/10279)
- The project update playbook now warns if role/collection syncing is disabled (https://github.com/ansible/awx/pull/10068)
- Moved IRC references to point to irc.libera.chat (https://github.com/ansible/awx/pull/10295)
- Fixed a bug where activity stream changes were displaying as [object Object] (https://github.com/ansible/awx/pull/10267)
- Updated awxkit to enable export of Galaxy credentials associated with organizations (https://github.com/ansible/awx/pull/10271)
- Bumped receptor and receptorctl versions to 1.0.0a2 (https://github.com/ansible/awx/pull/10261)
- Added the ability to disable local authentication (https://github.com/ansible/awx/pull/10102)
- AWX now shows an error if no Execution Environment is found on project sync/job run (https://github.com/ansible/awx/pull/10183)
- Allowed editing and deleting managed_by_tower EEs from the API/UI (https://github.com/ansible/awx/pull/10173)

# 19.1.0 (May 1, 2021)

- Custom inventory scripts have been removed from the API (https://github.com/ansible/awx/pull/9822)
  - Old scripts can be exported via `awx-manage export_custom_scripts`
- Fixed a bug where ad-hoc commands targeted against multiple hosts would run against only 1 host (https://github.com/ansible/awx/pull/9973)
- AWX will now look for a top-level requirements.yml when installing collections/roles in project updates (https://github.com/ansible/awx/pull/9945)
- Improved error handling when Container Group pods fail to launch (https://github.com/ansible/awx/pull/10025)
- Added the ability to set server-side password policies using Django's AUTH_PASSWORD_VALIDATORS setting; see the sketch after this list (https://github.com/ansible/awx/pull/9999)
- Bumped versions of Ansible Runner and AWX EE (https://github.com/ansible/awx/pull/10013)
  - If you have built any custom EEs on top of awx-ee 0.1.0, you will need to rebuild on top of 0.2.0.
- Removed legacy resource profiling code (https://github.com/ansible/awx/pull/9883)
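
For the AUTH_PASSWORD_VALIDATORS entry above, a minimal sketch of such a policy, assuming extra Django settings are supplied via a conf.d drop-in (the exact path varies by install method) and using Django's built-in validator classes:

```bash
# Hypothetical example: append a password policy to a custom AWX settings
# file (path assumed; adjust to wherever your install reads extra settings).
cat <<'EOF' >> /etc/tower/conf.d/password_policy.py
AUTH_PASSWORD_VALIDATORS = [
    # Reject passwords shorter than 12 characters.
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
     'OPTIONS': {'min_length': 12}},
    # Reject passwords too similar to the username or email.
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
]
EOF
```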

# 19.0.0 (April 7, 2021)

- AWX now runs on Python 3.8 (https://github.com/ansible/awx/pull/8778/)
- Fixed inventories-from-projects when running in Kubernetes (https://github.com/ansible/awx/pull/9741)
- Fixed a bug where a slash was appended to inventory file paths in the UI dropdown (https://github.com/ansible/awx/pull/9713)
- Fixed a bug with large file parsing in project syncs (https://github.com/ansible/awx/pull/9627)
- Fixed k8s credentials that use a custom CA cert (https://github.com/ansible/awx/pull/9744)
- Fixed a bug that allowed users to attempt to delete a running job (https://github.com/ansible/awx/pull/9758)
- Fixed the Kubernetes Pod reaper to properly delete Pods launched by Receptor (https://github.com/ansible/awx/pull/9819)
- AWX Collection modules: added the ability to set instance groups for organizations, job templates, and inventories (https://github.com/ansible/awx/pull/9804)
- Fixed CSP violation errors on the job details and job settings views (https://github.com/ansible/awx/pull/9818)
- Added support for any/all convergence on workflow nodes (https://github.com/ansible/awx/pull/9737)
- Fixed a race condition that caused InvalidGitRepositoryError (https://github.com/ansible/awx/pull/9754)
- Added support for Execution Environments to the Activity Stream (https://github.com/ansible/awx/issues/9308)
- Fixed a bug that improperly formatted OpenSSH keys specified in custom Credential Types (https://github.com/ansible/awx/issues/9361)
- Fixed an HTTP 500 error for unauthenticated users (https://github.com/ansible/awx/pull/9725)
- Added a subscription wizard (https://github.com/ansible/awx/pull/9496)

# 18.0.0 (March 23, 2021)

**IMPORTANT INSTALL AND UPGRADE NOTES**

Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX: https://github.com/ansible/awx/blob/devel/INSTALL.md#installing-awx

If you have a pre-existing installation of AWX that utilizes the Docker-based installation method, this install method has **notably changed** from 17.x to 18.x. For details, please see:

- https://groups.google.com/g/awx-project/c/47MjWSUQaOc/m/bCjSDn0eBQAJ
- https://github.com/ansible/awx/blob/devel/tools/docker-compose
- https://github.com/ansible/awx/blob/devel/tools/docker-compose/docs/data_migration.md

### Introducing Execution Environments

After a herculean effort from a number of contributors, we're excited to announce that AWX 18.0.0 introduces a new concept called Execution Environments.

Execution Environments are container images which consist of everything necessary to run a playbook within AWX, and which drive the entire management and lifecycle of playbook execution runtime in AWX: https://github.com/ansible/awx/issues/5157. This means that going forward, AWX no longer utilizes the [bubblewrap](https://github.com/containers/bubblewrap) project for playbook isolation, but instead utilizes a container per playbook run.

Much like custom virtualenvs, custom Execution Environments can be crafted to specify additional Python or system-level dependencies. [Ansible Builder](https://github.com/ansible/ansible-builder) outputs images you can upload to your registry which can *then* be defined in AWX and utilized for playbook runs.

To learn more about Ansible Builder and Execution Environments, see: https://www.ansible.com/blog/introduction-to-ansible-builder
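
As a rough illustration of that workflow, here is a minimal, hypothetical Execution Environment definition and build, assuming the version 1 `execution-environment.yml` schema from early ansible-builder releases and a registry/tag of your choosing:

```bash
# Hypothetical example: declare extra dependencies, then build and push
# a custom Execution Environment image for use in AWX. The referenced
# requirements.yml / requirements.txt files must exist alongside this file.
cat <<'EOF' > execution-environment.yml
version: 1
dependencies:
  galaxy: requirements.yml    # collections/roles baked into the image
  python: requirements.txt    # additional Python packages
EOF
ansible-builder build --tag registry.example.com/custom-ee:latest
podman push registry.example.com/custom-ee:latest
```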

### Other Notable Changes

- Removed the `installer` directory.
  - The Kubernetes installer has been removed in favor of [AWX Operator](https://github.com/ansible/awx-operator). Official images for Operator-based installs are no longer hosted on Docker Hub, but are instead available on [Quay](https://quay.io/repository/ansible/awx?tab=tags).
  - The "Local Docker" install method has been removed in favor of the development environment. Details can be found at: https://github.com/ansible/awx/blob/devel/tools/docker-compose/README.md
- Removed custom virtual environments (https://github.com/ansible/awx/pull/9498)
  - Custom virtual environments have been replaced by Execution Environments (https://github.com/ansible/awx/pull/9570)
- The default Container Group Pod definition has changed. All custom Pod specs have been reset. https://github.com/ansible/awx/commit/05ef51f710dad8f8036bc5acee4097db4adc0d71
- Added a user interface for the activity stream: https://github.com/ansible/awx/pull/9083
- Converted many of the top-level list views (Jobs, Teams, Hosts, Inventories, Projects, and more) to a new, permanent table component for substantially increased responsiveness, usability, maintainability, and other 'ility's: https://github.com/ansible/awx/pull/8970, https://github.com/ansible/awx/pull/9182 and many others!
- Added support for Centrify Vault (https://www.centrify.com) as a credential lookup plugin (https://github.com/ansible/awx/pull/9542)
- Added support for namespaces in the HashiCorp Vault credential plugin (https://github.com/ansible/awx/pull/9590)
- Added click-to-expand details for job tables
- Added search filtering to job output: https://github.com/ansible/awx/pull/9208
- Added the new migration, update, and "installation in progress" page: https://github.com/ansible/awx/pull/9123
- Added the user interface for job settings: https://github.com/ansible/awx/pull/8661
- Runtime errors from jobs are now displayed, along with an explanation of what went wrong, on the output page: https://github.com/ansible/awx/pull/8726
- You can now cancel a running job from its output and details panel: https://github.com/ansible/awx/pull/9199
- Fixed a bug where launch prompt inputs were unexpectedly deposited in the URL: https://github.com/ansible/awx/pull/9231
- Playbook, credential type, and inventory file inputs now support type-ahead and manual type-in! https://github.com/ansible/awx/pull/9120
- Added the ability to relaunch against failed hosts: https://github.com/ansible/awx/pull/9225
- Added a pending workflow approval count to the application header: https://github.com/ansible/awx/pull/9334
- Added a user interface for management jobs: https://github.com/ansible/awx/pull/9224
- Added a toast message showing notification template test results to the notification templates list: https://github.com/ansible/awx/pull/9318
- Replaced CodeMirror with AceEditor for editing template variables and notification templates: https://github.com/ansible/awx/pull/9281
- Added support for filtering and pagination on job output: https://github.com/ansible/awx/pull/9208
- Added support for HTML in custom login text: https://github.com/ansible/awx/pull/9519

# 17.1.0 (March 9, 2021)

- Addressed a security issue in AWX (CVE-2021-20253)
- Fixed a permissions error related to redis in K8S-based deployments: https://github.com/ansible/awx/issues/9401

# 17.0.1 (January 26, 2021)

- Fixed pgdocker directory permissions issue with Local Docker installer: https://github.com/ansible/awx/pull/9152
- Fixed a bug in the UI which caused toggle settings to not be changed when clicked: https://github.com/ansible/awx/pull/9093

# 17.0.0 (January 22, 2021)

- AWX now requires PostgreSQL 12 by default: https://github.com/ansible/awx/pull/8943
  - **Note:** users who encounter permissions errors at upgrade time should run `chown -R $(whoami) ~/.awx/pgdocker` to ensure the directory is owned by the user running the install playbook
- Added support for region name for OpenStack inventory: https://github.com/ansible/awx/issues/5080
- Added the ability to chain undefined attributes in custom notification templates: https://github.com/ansible/awx/issues/8677
- Dramatically simplified the `image_build` role: https://github.com/ansible/awx/pull/8980
- Fixed a bug which can cause schema migrations to fail at install time: https://github.com/ansible/awx/issues/9077
- Fixed a bug which caused the `is_superuser` user property to be out of date in certain circumstances: https://github.com/ansible/awx/pull/8833
- Fixed a bug which sometimes resulted in race conditions on setting access: https://github.com/ansible/awx/pull/8580
- Fixed a bug which sometimes caused an unexpected delay in stdout for some playbooks: https://github.com/ansible/awx/issues/9085
- (UI) Added support for credential password prompting on job launch: https://github.com/ansible/awx/pull/9028
- (UI) Added the ability to configure LDAP settings in the UI: https://github.com/ansible/awx/issues/8291
- (UI) Added a sync button to the Project detail view: https://github.com/ansible/awx/issues/8847
- (UI) Added a form for configuring Google OAuth 2.0 settings: https://github.com/ansible/awx/pull/8762
- (UI) Added searchable keys and related keys to the Credentials list: https://github.com/ansible/awx/issues/8603
- (UI) Added support for advanced search and copying to Notification Templates: https://github.com/ansible/awx/issues/7879
- (UI) Added support for prompting on workflow nodes: https://github.com/ansible/awx/issues/5913
- (UI) Added support for session timeouts: https://github.com/ansible/awx/pull/8250
- (UI) Fixed a bug that broke websocket streaming for the insecure ws:// protocol: https://github.com/ansible/awx/pull/8877
- (UI) Fixed a bug in the user interface when a translation for the browser's preferred locale isn't available: https://github.com/ansible/awx/issues/8884
- (UI) Fixed a bug where navigating from one survey question form directly to another wasn't reloading the form: https://github.com/ansible/awx/issues/7522
- (UI) Fixed a bug which can cause an uncaught error while launching a Job Template: https://github.com/ansible/awx/issues/8936
- Updated autobahn to address CVE-2020-35678

## 16.0.0 (December 10, 2020)

- AWX now ships with a reimagined user interface. **Please read this before upgrading:** https://groups.google.com/g/awx-project/c/KuT5Ao92HWo
- Removed support for syncing inventory from Red Hat CloudForms - https://github.com/ansible/awx/commit/0b701b3b2
- Removed support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
- Upgraded NodeJS to actively maintained LTS 14.15.1 - https://github.com/ansible/awx/pull/8766
- Added Git-LFS to the default image build - https://github.com/ansible/awx/pull/8700
- Added the ability to specify `metadata.labels` in the podspec for container groups (see the sketch after this list) - https://github.com/ansible/awx/issues/8486
- Added support for Kubernetes pod annotations - https://github.com/ansible/awx/pull/8434
- Added the ability to label the web container in local Docker installs - https://github.com/ansible/awx/pull/8449
- Added additional metadata (as an extra var) to playbook runs to report the SCM branch name - https://github.com/ansible/awx/pull/8433
- Fixed a bug that caused k8s installations to fail due to an incorrect Helm repo - https://github.com/ansible/awx/issues/8715
- Fixed a bug that prevented certain Workflow Approval resources from being deleted - https://github.com/ansible/awx/pull/8612
- Fixed a bug that prevented the deletion of inventories stuck in the "pending deletion" state - https://github.com/ansible/awx/issues/8525
- Fixed a display bug in webhook notifications with certain unicode characters - https://github.com/ansible/awx/issues/7400
- Improved support for exporting dependent objects (Inventory Hosts and Groups) in the `awx export` CLI tool - https://github.com/ansible/awx/commit/607bc0788
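
Regarding the `metadata.labels` entry above, a hypothetical container group pod spec override might look like the following; the namespace, label keys/values, and image are illustrative choices, not defaults:

```bash
# Hypothetical container group pod spec: the metadata.labels block is the
# part this release makes configurable; other fields are abbreviated.
cat <<'EOF' > container-group-pod.yml
apiVersion: v1
kind: Pod
metadata:
  namespace: awx
  labels:
    team: platform
    cost-center: ops-123
spec:
  containers:
    - name: worker
      image: ansible/ansible-runner:latest
EOF
```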

## 15.0.1 (October 20, 2020)

- Added several optimizations to improve performance for a variety of high-load simultaneous job launch use cases - https://github.com/ansible/awx/pull/8403
- Added the ability to source roles and collections from requirements.yaml files, not just requirements.yml (see the example after this list) - https://github.com/ansible/awx/issues/4540
- awx.awx collection modules now provide a clearer error message for incompatible versions of awxkit - https://github.com/ansible/awx/issues/8127
- Fixed a bug in notification messages that contain certain unicode characters - https://github.com/ansible/awx/issues/7400
- Fixed a bug that prevented the deletion of Workflow Approval records - https://github.com/ansible/awx/issues/8305
- Fixed a bug that broke the selection of webhook credentials - https://github.com/ansible/awx/issues/7892
- Fixed a bug which can cause confusing behavior for social auth logins across distinct browser tabs - https://github.com/ansible/awx/issues/8154
- Fixed several bugs in the output of Workflow Job Templates using the `awx export` tool - https://github.com/ansible/awx/issues/7798 https://github.com/ansible/awx/pull/7847
- Fixed a race condition that can lead to missing hosts when running parallel inventory syncs - https://github.com/ansible/awx/issues/5571
- Fixed an HTTP 500 error when certain LDAP group parameters aren't properly set - https://github.com/ansible/awx/issues/7622
- Updated a few dependencies in response to several CVEs:
  * CVE-2020-7720
  * CVE-2020-7743
  * CVE-2020-7676
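
For the requirements.yaml change above, a minimal example, assuming the conventional `collections/` location inside a project, might be:

```bash
# Hypothetical project layout: AWX now honors the .yaml spelling too.
mkdir -p collections
cat <<'EOF' > collections/requirements.yaml
collections:
  - name: community.general
  - name: ansible.posix
EOF
```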

## 15.0.0 (September 30, 2020)

- Added improved support for fetching Ansible collections from private Galaxy content sources (such as https://github.com/ansible/galaxy_ng) - https://github.com/ansible/awx/issues/7813
  - **Note:** as part of this change, new Organizations created in the AWX API will _no longer_ automatically synchronize roles and collections from galaxy.ansible.com by default. More details on this change can be found at: https://github.com/ansible/awx/issues/8341#issuecomment-707310633
- AWX now utilizes a version of certifi that auto-discovers certificates in the system certificate store - https://github.com/ansible/awx/pull/8242
- Added support for arbitrary custom inventory plugin configuration: https://github.com/ansible/awx/issues/5150
- Added an optional setting to disable the auto-creation of organizations and teams on successful SAML login - https://github.com/ansible/awx/pull/8069
- Added a number of optimizations to AWX's callback receiver to improve the speed of stdout processing for simultaneous playbook runs - https://github.com/ansible/awx/pull/8193 https://github.com/ansible/awx/pull/8191
- Added the ability to use `!include` and `!import` constructors when constructing YAML for use with the AWX CLI (see the example after this list) - https://github.com/ansible/awx/issues/8135
- Fixed a bug that prevented certain users from being able to edit approval nodes in Workflows - https://github.com/ansible/awx/pull/8253
- Fixed a bug that broke password prompting for credentials in certain cases - https://github.com/ansible/awx/issues/8202
- Fixed a bug which can cause PostgreSQL deadlocks when running many parallel playbooks against large shared inventories - https://github.com/ansible/awx/issues/8145
- Fixed a bug which can cause delays in AWX's task manager when large numbers of simultaneous jobs are scheduled - https://github.com/ansible/awx/issues/7655
- Fixed a bug which can cause certain scheduled jobs - those that run every X minute(s) or hour(s) - to fail to run at the proper time - https://github.com/ansible/awx/issues/8071
- Fixed a performance issue for playbooks that store large amounts of data using the `set_stats` module - https://github.com/ansible/awx/issues/8006
- Fixed a bug related to AWX's handling of the auth_path argument for the HashiCorp Vault KeyValue credential plugin - https://github.com/ansible/awx/pull/7991
- Fixed a bug that broke support for Remote Archive SCM Type project syncs on platforms that utilize Python 2 - https://github.com/ansible/awx/pull/8057
- Updated to the latest version of Django Rest Framework to address CVE-2020-25626
- Updated to the latest version of Django to address CVE-2020-24583 and CVE-2020-24584
- Updated to the latest version of channels_redis to address a bug that slowly causes Daphne processes to leak memory over time - https://github.com/django/channels_redis/issues/212
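
To illustrate the `!include`/`!import` change above, a hypothetical sketch follows; the resource names and keys are made up, and only the constructor usage is the point:

```bash
# Hypothetical example: share a YAML fragment across exported resources,
# then feed the composed document to the AWX CLI on stdin.
cat <<'EOF' > common_extra_vars.yml
env: production
region: us-east-1
EOF
cat <<'EOF' > resources.yml
job_templates:
  - name: Deploy
    extra_vars: !include common_extra_vars.yml
EOF
awx import < resources.yml
```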

## 14.1.0 (Aug 25, 2020)

- AWX images can now be built on ARM64 - https://github.com/ansible/awx/pull/7607
- Added the Remote Archive SCM Type to support using immutable artifacts and releases (such as tarballs and zip files) as projects - https://github.com/ansible/awx/issues/7954
- Deprecated official support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
- Added resource import/export support to the official AWX collection - https://github.com/ansible/awx/issues/7329
- Added the ability to import YAML-based resources (instead of just JSON) when using the AWX CLI - https://github.com/ansible/awx/pull/7808
- Users upgrading from older versions of AWX may encounter an issue that causes their postgres container to restart in a loop (https://github.com/ansible/awx/issues/7854) - if you encounter this, bring your containers down and then back up (e.g., `docker-compose down && docker-compose up -d`) after upgrading to 14.1.0.
- Updated the AWX CLI to export labels associated with Workflow Job Templates - https://github.com/ansible/awx/pull/7847
- Updated to the latest python-ldap to address a bug - https://github.com/ansible/awx/issues/7868
- Upgraded git-python to fix a bug that caused workflows to sometimes fail - https://github.com/ansible/awx/issues/6119
- Worked around a bug in the channels_redis library that slowly causes Daphne processes to leak memory over time - https://github.com/django/channels_redis/issues/212
- Fixed a bug in the AWX CLI that prevented Workflow nodes from importing properly - https://github.com/ansible/awx/issues/7793
- Fixed a bug in the awx.awx collection release process that templated the wrong version - https://github.com/ansible/awx/issues/7870
- Fixed a bug that caused errors rendering stdout that contained UTF-16 surrogate pairs - https://github.com/ansible/awx/pull/7918

## 14.0.0 (Aug 6, 2020)

- As part of our commitment to inclusivity in open source, we recently took some time to audit AWX's source code and user interface and replace certain terminology with more inclusive language. Strictly speaking, this isn't a bug or a feature, but we think it's important and worth calling attention to:
  * https://github.com/ansible/awx/commit/78229f58715fbfbf88177e54031f532543b57acc
  * https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language
- Installing roles and collections via requirements.yml as part of Project Updates now requires at least Ansible 2.9 - https://github.com/ansible/awx/issues/7769
- Deprecated the use of the `PRIMARY_GALAXY_USERNAME` and `PRIMARY_GALAXY_PASSWORD` settings. We recommend using tokens to access Galaxy or Automation Hub.
- Added local caching for downloaded roles and collections so they are not re-downloaded on nodes where they are up to date with the project - https://github.com/ansible/awx/issues/5518
- Added the ability to associate K8S/OpenShift credentials to Job Templates for playbook interaction with the `community.kubernetes` collection - https://github.com/ansible/awx/issues/5735
- Added the ability to include HTML in the Custom Login Info presented on the login page - https://github.com/ansible/awx/issues/7600
- Fixed https://access.redhat.com/security/cve/cve-2020-14327 - Server-side request forgery on credentials
- Fixed https://access.redhat.com/security/cve/cve-2020-14328 - Server-side request forgery on webhooks
- Fixed https://access.redhat.com/security/cve/cve-2020-14329 - Sensitive data exposure on labels
- Fixed https://access.redhat.com/security/cve/cve-2020-14337 - Named URLs allowed for testing the presence or absence of objects
- Fixed a number of bugs in the user interface related to an upgrade of jQuery:
  * https://github.com/ansible/awx/issues/7530
  * https://github.com/ansible/awx/issues/7546
  * https://github.com/ansible/awx/issues/7534
  * https://github.com/ansible/awx/issues/7606
- Fixed a bug that caused the `-f yaml` flag of the AWX CLI to not print properly formatted YAML (see the usage example after this list) - https://github.com/ansible/awx/issues/7795
- Fixed a bug in the installer that caused errors when `docker_registry_password` was set - https://github.com/ansible/awx/issues/7695
- Fixed a permissions error that prevented certain users from starting AWX services - https://github.com/ansible/awx/issues/7545
- Fixed a bug that allowed superusers to run unsafe Jinja code when defining custom Credential Types - https://github.com/ansible/awx/pull/7584/
- Fixed a bug that prevented users from creating (or editing) custom Credential Types containing boolean fields - https://github.com/ansible/awx/issues/7483
- Fixed a bug that prevented users with postgres usernames containing uppercase letters from restoring backups successfully - https://github.com/ansible/awx/pull/7519
- Fixed a bug which allowed the creation (in the Tower API) of Groups and Hosts with the same name - https://github.com/ansible/awx/issues/4680
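
For the `-f yaml` fix above, typical usage looks like this; the resource and filter are illustrative:

```bash
# The -f flag selects the CLI output format; this release fixes its YAML output.
awx jobs list --status successful -f yaml
```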

## 13.0.0 (Jun 23, 2020)

- Added import and export commands to the official AWX CLI, replacing send and receive from the old tower-cli (https://github.com/ansible/awx/pull/6125)
- Removed scripts as a means of running inventory updates of built-in types (https://github.com/ansible/awx/pull/6911)
- Ansible 2.8 is now partially unsupported; some inventory source types are known to no longer work.
- Fixed an issue where the vmware inventory source `ssl_verify` source variable was not recognized (https://github.com/ansible/awx/pull/7360)
- Fixed a bug that caused redis' listen socket to have too-permissive file permissions (https://github.com/ansible/awx/pull/7317)
- Fixed a bug that caused rsyslogd's configuration file to have world-readable file permissions, potentially leaking secrets (CVE-2020-10782)

## 12.0.0 (Jun 9, 2020)

- Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240)
- Moved to a single container image build instead of separate awx_web and awx_task images. The container image is just `awx` (https://github.com/ansible/awx/pull/7228)
- Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017)
- Removed support for HipChat notifications ([EoL announcement](https://www.atlassian.com/partnerships/slack/faq#faq-98b17ca3-247f-423b-9a78-70a91681eff0)); all previously-created HipChat notification templates will be deleted due to this removal.
- Fixed a bug which broke AWX installations with oc version 4.3 (https://github.com/ansible/awx/pull/6948/)
- Fixed a performance issue that caused notable delays in stdout processing for playbooks run against large numbers of hosts (https://github.com/ansible/awx/issues/6991)
- Fixed a bug that caused CyberArk AIM credential plugin lookups to hang forever in some environments (https://github.com/ansible/awx/issues/6986)
- Fixed a bug that caused ANY/ALL convergence settings not to save properly when editing approval nodes in the UI (https://github.com/ansible/awx/issues/6998)
- Fixed a bug that broke support for the satellite6_group_prefix source variable (https://github.com/ansible/awx/issues/7031)
- Fixed a bug that prevented changes to workflow node convergence settings when approval nodes were in use (https://github.com/ansible/awx/issues/7063)
- Fixed a bug that caused notifications to fail on newer versions of Mattermost (https://github.com/ansible/awx/issues/7264)
- Fixed a bug (by upgrading to 0.8.1 of the foreman collection) that prevented host_filters from working properly with Foreman-based inventory (https://github.com/ansible/awx/issues/7225)
- Fixed a bug that prevented the usage of the Conjur credential plugin with secrets that contain spaces (https://github.com/ansible/awx/issues/7191)
- Fixed a bug in `awx-manage run_wsbroadcast --status` in kubernetes (https://github.com/ansible/awx/pull/7009)
- Fixed a bug that broke notification toggles for system jobs in the UI (https://github.com/ansible/awx/pull/7042)
- Fixed a bug that broke local pip installs of awxkit (https://github.com/ansible/awx/issues/7107)
- Fixed a bug that prevented PagerDuty notifications from sending for workflow job template approvals (https://github.com/ansible/awx/issues/7094)
- Fixed a bug that broke external log aggregation support for URL paths that include the = character (such as the tokens for SumoLogic) (https://github.com/ansible/awx/issues/7139)
- Fixed a bug that prevented organization admins from removing labels from workflow job templates (https://github.com/ansible/awx/pull/7143)

## 11.2.0 (Apr 29, 2020)

- Inventory updates now use collection-based plugins by default (in Ansible 2.9+):
  - amazon.aws.aws_ec2
  - community.vmware.vmware_vm_inventory
  - azure.azcollection.azure_rm
  - google.cloud.gcp_compute
  - theforeman.foreman.foreman
  - openstack.cloud.openstack
  - ovirt.ovirt_collection.ovirt
  - awx.awx.tower
- Added support for AppRole and LDAP/AD mechanisms to the HashiCorp Vault credential plugin (https://github.com/ansible/awx/issues/5076)
- Added Project (Domain Name) support for the OpenStack Keystone v3 API (https://github.com/ansible/awx/issues/6831)
- Added a new setting for raising log verbosity for rsyslogd (https://github.com/ansible/awx/pull/6818)
- Added the ability to monitor stdout in the CLI for running jobs and workflow jobs (https://github.com/ansible/awx/issues/6165)
- Fixed a bug which prevented the AWX CLI from properly installing with newer versions of pip (https://github.com/ansible/awx/issues/6870)
- Fixed a bug which broke AWX's external logging support when configured with HTTPS endpoints that utilize self-signed certificates (https://github.com/ansible/awx/issues/6851)
- Fixed a local docker installer bug that mistakenly attempted to upgrade PostgreSQL when an external pg_hostname is specified (https://github.com/ansible/awx/pull/5398)
- Fixed a race condition that caused task container crashes when pods are quickly brought down and back up (https://github.com/ansible/awx/issues/6750)
- Fixed a bug that caused 404 errors when attempting to view the second page of the workflow approvals view (https://github.com/ansible/awx/issues/6803)
- Fixed a bug that prevented the use of ANSIBLE_SSH_ARGS for ad-hoc commands (https://github.com/ansible/awx/pull/6811)
- Fixed a bug that broke AWX installs/upgrades on Red Hat OpenShift (https://github.com/ansible/awx/issues/6791)

## 11.1.0 (Apr 22, 2020)

- Changed rsyslogd to persist queued events to disk (to prevent a risk of out-of-memory errors) (https://github.com/ansible/awx/issues/6746)
- Added the ability to configure the destination and maximum disk size of the rsyslogd spool, used in the event of a log aggregator outage; see the sketch after this list (https://github.com/ansible/awx/pull/6763)
- Added the ability to discover playbooks in project clones from symlinked directories (https://github.com/ansible/awx/pull/6773)
- Fixed a bug that caused certain log aggregator settings to break logging integration (https://github.com/ansible/awx/issues/6760)
- Fixed a bug that caused playbook execution in container groups to sometimes unexpectedly deadlock (https://github.com/ansible/awx/issues/6692)
- Improved stability of the new redis clustering implementation (https://github.com/ansible/awx/pull/6739 https://github.com/ansible/awx/pull/6720)
- Improved stability of the new rsyslogd-based logging implementation (https://github.com/ansible/awx/pull/6796)
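
For the spool configuration above, the knobs are exposed as AWX settings; the setting names below match what this release appears to add but should be verified against your version's settings list:

```bash
# Assumed setting names for the rsyslogd spool location and disk-usage cap.
awx settings modify LOG_AGGREGATOR_MAX_DISK_USAGE_PATH /var/lib/awx
awx settings modify LOG_AGGREGATOR_MAX_DISK_USAGE_GB 5
```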

## 11.0.0 (Apr 16, 2020)

- As of AWX 11.0.0, Kubernetes-based deployments use a Deployment rather than a StatefulSet.
- Reimplemented external logging support using rsyslogd to improve reliability and address a number of issues (https://github.com/ansible/awx/issues/5155)
- Changed activity stream logs to include summary fields for related objects (https://github.com/ansible/awx/issues/1761)
- Added code to more gracefully attempt to reconnect to redis if it restarts/becomes unavailable (https://github.com/ansible/awx/pull/6670)
- Fixed a bug that caused REFRESH_TOKEN_EXPIRE_SECONDS to not properly be respected for OAuth2.0 refresh tokens generated by AWX (https://github.com/ansible/awx/issues/6630)
- Fixed a bug that broke schedules containing RRULES with very old DTSTART dates (https://github.com/ansible/awx/pull/6550)
- Fixed a bug that broke installs on older versions of Ansible packaged with certain Linux distributions (https://github.com/ansible/awx/issues/5501)
- Fixed a bug that caused the activity stream to sometimes report the incorrect actor when associating user membership on SAML login (https://github.com/ansible/awx/pull/6525)
- Fixed a bug in AWX's Grafana notification support when annotation tags are omitted (https://github.com/ansible/awx/issues/6580)
- Fixed a bug that prevented some users from searching for Source Control credentials in the AWX user interface (https://github.com/ansible/awx/issues/6600)
- Fixed a bug that prevented disassociating orphaned users from credentials (https://github.com/ansible/awx/pull/6554)
- Updated Twisted to address CVE-2020-10108 and CVE-2020-10109.

## 10.0.0 (Mar 30, 2020)

- As of AWX 10.0.0, the official AWX CLI no longer supports Python 2 (it requires at least Python 3.6) (https://github.com/ansible/awx/pull/6327)
- AWX no longer relies on RabbitMQ; Redis is added as a new dependency (https://github.com/ansible/awx/issues/5443)
- Altered AWX's event tables to allow more than ~2 billion total events (https://github.com/ansible/awx/issues/6010)
- Improved the performance (time to execute, and memory consumption) of the periodic job cleanup system job (https://github.com/ansible/awx/pull/6166)
- Updated Job Templates so they now have an explicit Organization field (it is no longer inferred from the associated Project) (https://github.com/ansible/awx/issues/3903)
- Updated social-auth-core to address an upcoming GitHub API deprecation (https://github.com/ansible/awx/issues/5970)
- Updated to ansible-runner 1.4.6 to address various bugs.
- Updated Django to address CVE-2020-9402
- Updated pyyaml version to address CVE-2017-18342
- Fixed a bug which prevented the new `scm_branch` field from being used in custom notification templates (https://github.com/ansible/awx/issues/6258)
- Fixed a race condition that sometimes caused success/failure notifications to include an incomplete list of hosts (https://github.com/ansible/awx/pull/6290)
- Fixed a bug that could cause certain settings pages to lose unsaved form edits when a playbook is launched (https://github.com/ansible/awx/issues/5265)
- Fixed a bug that could prevent the "Use TLS/SSL" field from properly saving when editing email notification templates (https://github.com/ansible/awx/issues/6383)
- Fixed a race condition that sometimes broke event/stdout processing for jobs launched in container groups (https://github.com/ansible/awx/issues/6280)

## 9.3.0 (Mar 12, 2020)

- Added the ability to specify an OAuth2 token description in the AWX CLI (https://github.com/ansible/awx/issues/6122)
- Added support for K8S service account annotations to the installer (https://github.com/ansible/awx/pull/6007)
- Added support for K8S imagePullSecrets to the installer (https://github.com/ansible/awx/pull/5989)
- Launching jobs (and workflows) using the --monitor flag in the AWX CLI now returns a non-zero exit code on job failure; see the sketch after this list (https://github.com/ansible/awx/issues/5920)
- Improved UI performance for various job views when many simultaneous users are logged into AWX (https://github.com/ansible/awx/issues/5883)
- Updated to the latest version of Django to address a few open CVEs (https://github.com/ansible/awx/pull/6080)
- Fixed a critical bug which could cause AWX to hang and stop launching playbooks after a period of time (https://github.com/ansible/awx/issues/5617)
- Fixed a bug which caused delays in project update stdout for certain large SCM clones (as of Ansible 2.9+) (https://github.com/ansible/awx/pull/6254)
- Fixed a bug which caused certain smart inventory filters to mistakenly return duplicate hosts (https://github.com/ansible/awx/pull/5972)
- Fixed an unclear server error when creating smart inventories with the AWX collection (https://github.com/ansible/awx/issues/6250)
- Fixed a bug that broke Grafana notification support (https://github.com/ansible/awx/issues/6137)
- Fixed a UI bug which prevented users with read access to an organization from editing credentials for that organization (https://github.com/ansible/awx/pull/6241)
- Fixed a bug which prevented workflow approval records from recording a `started` and `elapsed` date (https://github.com/ansible/awx/issues/6202)
- Fixed a bug which caused workflow nodes to have a confusing option for `verbosity` (https://github.com/ansible/awx/issues/6196)
- Fixed an RBAC bug which prevented projects and inventory schedules from being created by certain users in certain contexts (https://github.com/ansible/awx/issues/5717)
- Fixed a bug that caused `role_path` in a project's config to not be respected due to an error processing `/etc/ansible/ansible.cfg` (https://github.com/ansible/awx/pull/6038)
- Fixed a bug that broke inventory updates for installs with custom home directories for the awx user (https://github.com/ansible/awx/pull/6152)
- Fixed a bug that broke fact data collection when AWX encounters invalid/unexpected fact data (https://github.com/ansible/awx/issues/5935)
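
For the `--monitor` change above, a sketch of how the exit-code behavior can be used in scripting; the template ID is a placeholder:

```bash
# Launch and stream the job; the CLI now exits non-zero if the job fails.
if awx job_templates launch 42 --monitor; then
    echo "job succeeded"
else
    echo "job failed" >&2
    exit 1
fi
```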

## 9.2.0 (Feb 12, 2020)

- Added the ability to configure the convergence behavior of workflow nodes (https://github.com/ansible/awx/issues/3054)
- AWX now allows for a configurable global limit for fork count (per job run); the default maximum is 200, and the sketch after this list shows how the cap can be adjusted (https://github.com/ansible/awx/pull/5604)
- Added the ability to specify AZURE_PUBLIC_CLOUD (for e.g., Azure Government KeyVault support) for the Azure credential plugin (https://github.com/ansible/awx/issues/5138)
- Added support for several additional parameters for Satellite dynamic inventory (https://github.com/ansible/awx/pull/5598)
- Added a new field to jobs for tracking the date/time a job is cancelled (https://github.com/ansible/awx/pull/5610)
- Made a series of additional optimizations to the callback receiver to further improve stdout write speed for running playbooks (https://github.com/ansible/awx/pull/5677 https://github.com/ansible/awx/pull/5739)
- Updated AWX to be compatible with Helm 3.x (https://github.com/ansible/awx/pull/5776)
- Optimized AWX's job dependency/scheduling code to drastically improve processing time in scenarios where many pending jobs are scheduled simultaneously (https://github.com/ansible/awx/issues/5154)
- Fixed a bug which could cause SCM authentication details (basic auth passwords) to be reported to external loggers in certain failure scenarios (e.g., when a git clone fails and ansible itself prints an error message to stdout) (https://github.com/ansible/awx/pull/5812)
- Fixed a k8s installer bug that caused installs to fail in certain situations (https://github.com/ansible/awx/issues/5574)
- Fixed a number of issues that caused analytics gathering and reporting to run more often than necessary (https://github.com/ansible/awx/pull/5721)
- Fixed a bug in the AWX CLI that prevented JSON-type settings from saving properly (https://github.com/ansible/awx/issues/5528)
- Improved support for fetching custom virtualenv dependencies when AWX is installed behind a proxy (https://github.com/ansible/awx/pull/5805)
- Updated the bundled version of openstacksdk to address a known issue (https://github.com/ansible/awx/issues/5821)
- Updated the bundled vmware_inventory plugin to the latest version to address a bug (https://github.com/ansible/awx/pull/5668)
- Fixed a bug that could cause inventory updates to fail to properly save their output when run within a workflow (https://github.com/ansible/awx/pull/5666)
- Removed a number of pre-computed fields from the Host and Group models to improve AWX performance. As part of this change, inventory group UIs throughout the interface no longer display status icons (https://github.com/ansible/awx/pull/5448)
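
The global fork cap mentioned above is a settable limit; assuming the setting is named MAX_FORKS (worth verifying on your install), it can be adjusted like so:

```bash
# Assumed setting name for the per-job fork ceiling (default maximum: 200).
awx settings modify MAX_FORKS 100
```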

## 9.1.1 (Jan 14, 2020)

- Fixed a bug that caused database migrations on Kubernetes installs to hang (https://github.com/ansible/awx/pull/5579)
- Upgraded Python-level app dependencies in the AWX virtual environment (https://github.com/ansible/awx/pull/5407)
- Running jobs no longer block associated inventory updates (https://github.com/ansible/awx/pull/5519)
- Fixed an invalid_response SAML error (https://github.com/ansible/awx/pull/5577)
- Optimized the callback receiver to drastically improve the write speed of stdout for parallel jobs (https://github.com/ansible/awx/pull/5618)

## 9.1.0 (Dec 17, 2019)

- Added a command to generate a new SECRET_KEY and rekey the secrets in the database; see the sketch after this list
- Removed project update locking when jobs that use the project are running
- Fixed slow queries for /api/v2/instances and /api/v2/instance_groups when smart inventories are used
- Fixed a partial password disclosure when special characters existed in the RabbitMQ password (CVE-2019-19342)
- Fixed a hang in error handling for source control checkouts
- Fixed an error on subsequent job runs that override the branch of a project on an instance that did not have a prior project checkout
- Fixed an issue where jobs launched in isolated or container groups would incorrectly time out
- Fixed an incorrect link to the instance groups documentation in the user interface
- Fixed editing of inventory on Workflow templates
- Fixed multiple issues with OAuth2 token cleanup system jobs
- Fixed a bug that broke email notifications for workflow approval/deny (https://github.com/ansible/awx/issues/5401)
- Updated the SAML implementation to automatically log in if authorization already exists
- Updated AngularJS to 1.7.9 for CVE-2019-10768
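
Assuming the command referenced above is exposed through awx-manage as `regenerate_secret_key` (verify with `awx-manage --help` on your install), usage would be:

```bash
# Assumed invocation: prints a new SECRET_KEY after re-encrypting stored
# secrets; each node's SECRET_KEY must then be updated and services restarted.
awx-manage regenerate_secret_key
```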

## 9.0.1 (Nov 4, 2019)

- Fixed a bug in the installer that broke certain types of k8s installs (https://github.com/ansible/awx/issues/5205)

## 9.0.0 (Oct 31, 2019)

- Updated AWX images to use centos:8 as the parent image.
- Updated to ansible-runner 1.4.4 to address various bugs.
- Added oc and kubectl to the AWX images to support new container-based execution introduced in 8.0.0.
- Added some optimizations to speed up the deletion of large Inventory Groups.
- Fixed a bug that broke webhook launches for Job Templates that define a survey (https://github.com/ansible/awx/issues/5062).
- Fixed a bug in the CLI which incorrectly parsed launch time arguments for `awx job_templates launch` and `awx workflow_job_templates launch` (https://github.com/ansible/awx/issues/5093).
- Fixed a bug that caused inventory updates using "sourced from a project" to stop working (https://github.com/ansible/awx/issues/4750).
- Fixed a bug that caused Slack notifications to sometimes show the wrong bot avatar (https://github.com/ansible/awx/pull/5125).
- Fixed a bug that prevented the use of digits in AWX's URL settings (https://github.com/ansible/awx/issues/5081).

## 8.0.0 (Oct 21, 2019)

- The Ansible Tower Ansible modules have been migrated to a new official Ansible AWX collection: https://galaxy.ansible.com/awx/AWX
  Please note that this functionality is only supported in Ansible 2.9+
- AWX now supports the ability to launch jobs from external webhooks (GitHub and GitLab integration are supported).
- AWX now supports Container Groups, a new feature that allows you to schedule and run playbooks on single-use kubernetes pods on-demand.
- AWX now supports sending notifications when Workflow steps are approved, denied, or time out.
- AWX now records the user who approved or denied Workflow steps.
- AWX now supports fetching Ansible Collections from private galaxy servers.
- AWX now checks the user's ansible.cfg for paths where roles/collections may live when running project updates.
- AWX now uses PostgreSQL 10 by default.
- AWX now warns more loudly about underlying AMQP connectivity issues (https://github.com/ansible/awx/pull/4857).
- Added a few optimizations to drastically improve dashboard performance for larger AWX installs (installs with several hundred thousand jobs or more).
- Updated to the latest version of Ansible's VMWare inventory script (which adds support for vmware_guest_facts).
- Deprecated /api/v2/inventory_scripts/ (this endpoint - and the Custom Inventory Script feature - will be removed in a future release of AWX).
- Fixed a bug which prevented Organization Admins from removing users from their own Organization (https://github.com/ansible/awx/issues/2979)
- Fixed a bug which sometimes caused cluster nodes to fail to re-join with a cryptic error, "No instance found with the current cluster host id" (https://github.com/ansible/awx/issues/4294)
- Fixed a bug that prevented the use of launch-time passphrases when using credential plugins (https://github.com/ansible/awx/pull/4807)
- Fixed a bug that caused notifications assigned at the Organization level not to take effect for Workflows in that Organization (https://github.com/ansible/awx/issues/4712)
- Fixed a bug which caused a notable amount of CPU overhead on RabbitMQ health checks (https://github.com/ansible/awx/pull/5009)
- Fixed a bug which sometimes caused the `<return>` key to stop functioning in `<textarea>` elements (https://github.com/ansible/awx/issues/4192)
- Fixed a bug which caused request contention when the same OAuth2.0 token was used in multiple simultaneous requests (https://github.com/ansible/awx/issues/4694)
- Fixed a bug related to parsing multiple choice survey options (https://github.com/ansible/awx/issues/4452).
- Fixed a bug that caused single-sign-on icons on the login page to fail to render in certain Windows browsers (https://github.com/ansible/awx/issues/3924)
- Fixed a number of bugs that caused certain OAuth2 settings to not be properly respected, such as REFRESH_TOKEN_EXPIRE_SECONDS.
- Fixed a number of bugs in the AWX CLI, including a bug which sometimes caused long lines of stdout output to be unexpectedly truncated.
- Fixed a number of bugs on the job details UI which sometimes caused auto-scrolling stdout to become stuck.
- Fixed a bug which caused LDAP authentication to fail if the TLD of the server URL contained digits (https://github.com/ansible/awx/issues/3646)
- Fixed a bug which broke HashiCorp Vault integration on older versions of HashiCorp Vault.

## 7.0.0 (Sept 4, 2019)

- AWX now detects and installs Ansible Collections defined in your project (note - this feature only works in Ansible 2.9+) (https://github.com/ansible/awx/issues/2534)
- AWX now includes an official command line client. Keep an eye out for a follow-up email on this mailing list for information on how to install it and try it out.
- Added the ability to provide a specific SCM branch on jobs (https://github.com/ansible/awx/issues/282)
- Added support for Workflow Approval Nodes, a new feature which allows you to add "pause and wait for approval" steps into your workflows (https://github.com/ansible/awx/issues/1206)
- Added the ability to specify a specific HTTP method for webhook notifications (POST vs PUT) (https://github.com/ansible/awx/pull/4124)
- Added the ability to specify a username and password for HTTP Basic Authorization for webhook notifications (https://github.com/ansible/awx/pull/4124)
- Added support for customizing the text content of notifications (https://github.com/ansible/awx/issues/79)
- Added the ability to enable and disable hosts in dynamic inventory (https://github.com/ansible/awx/pull/4420)
- Added the description (if any) to the Job Template list (https://github.com/ansible/awx/issues/4359)
- Added new metrics for instance hostnames and pending jobs to the /api/v2/metrics/ endpoint (https://github.com/ansible/awx/pull/4375)
- Changed AWX's on/off toggle buttons to a non-text based style to simplify internationalization (https://github.com/ansible/awx/pull/4425)
- Events emitted by ansible for ad-hoc commands are now sent to the external log aggregator (https://github.com/ansible/awx/issues/4545)
- Fixed a bug which allowed a user to create an organization credential in another organization without permissions to that organization (https://github.com/ansible/awx/pull/4483)
- Fixed a bug that caused `extra_vars` on workflows to break when edited (https://github.com/ansible/awx/issues/4293)
- Fixed a slow SQL query that caused performance issues when large numbers of groups exist (https://github.com/ansible/awx/issues/4461)
- Fixed a few minor bugs in survey field validation (https://github.com/ansible/awx/pull/4509) (https://github.com/ansible/awx/pull/4479)
- Fixed a bug that sometimes resulted in orphaned `ansible_runner_pi` directories in `/tmp` after playbook execution (https://github.com/ansible/awx/pull/4409)
- Fixed a bug that caused the `is_system_auditor` flag in LDAP configuration to not work (https://github.com/ansible/awx/pull/4396)
- Fixed a bug which caused schedules to disappear from the UI when toggled off (https://github.com/ansible/awx/pull/4378)
- Fixed a bug that sometimes caused stdout content to contain extraneous blank lines in newer versions of Ansible (https://github.com/ansible/awx/pull/4391)
- Updated to the latest Django security release, 2.2.4 (https://github.com/ansible/awx/pull/4410) (https://www.djangoproject.com/weblog/2019/aug/01/security-releases/)
- Updated the default version of git to a version that includes support for x509 certificates (https://github.com/ansible/awx/issues/4362)
- Removed the deprecated `credential` field from `/api/v2/workflow_job_templates/N/` (as part of the `/api/v1/` removal in prior AWX versions - https://github.com/ansible/awx/pull/4490).

## 6.1.0 (Jul 18, 2019)

- Updated AWX to use Django 2.2.2.
- Updated the provided openstacksdk version to support new functionality (such as Nova scheduler_hints)
- Added the ability to specify a custom cacert for the HashiCorp Vault credential plugin
- Fixed a number of bugs related to path lookups for the HashiCorp Vault credential plugin
- Fixed a bug which prevented signed SSH certificates from working, including the HashiCorp Vault Signed SSH backend
- Fixed a bug which prevented custom logos from displaying on the login page (as a result of a new Content Security Policy in 6.0.0)
- Fixed a bug which broke websocket connectivity in Apple Safari (as a result of a new Content Security Policy in 6.0.0)
- Fixed a bug on the job output page that occasionally caused the "up" and "down" buttons to not load additional output
- Fixed a bug on the job output page that caused quoted task names to display incorrectly

## 6.0.0 (Jul 1, 2019)

- Removed support for "Any" notification templates and their API endpoints, e.g., /api/v2/job_templates/N/notification_templates/any/ (https://github.com/ansible/awx/issues/4022)
- Fixed a bug which prevented credentials from properly being applied to inventory sources (https://github.com/ansible/awx/issues/4059)
- Fixed a bug which could cause the task dispatcher to hang indefinitely when external logging support (e.g., Splunk, Logstash) is enabled (https://github.com/ansible/awx/issues/4181)
- Fixed a bug which caused slow stdout display when running jobs against smart inventories (https://github.com/ansible/awx/issues/3106)
- Fixed a bug that caused SSL verification flags to fail to be respected for LDAP authentication in certain environments (https://github.com/ansible/awx/pull/4190)
- Added a simple Content Security Policy (https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) to restrict access to third-party resources in the browser (https://github.com/ansible/awx/pull/4167)
- Updated ovirt4 library dependencies to work with newer versions of oVirt (https://github.com/ansible/awx/issues/4138)

## 5.0.0 (Jun 21, 2019)

- Bumped Django Rest Framework from 3.7.7 to 3.9.4
- Bumped setuptools / pip dependencies
- Fixed a bug where the Recent Notifications list would not appear
- Added notifications on job start
- AWX now defaults to Ansible 2.8
|||||||
@@ -6,21 +6,21 @@ Have questions about this document or anything not covered here? Come chat with

 ## Table of contents

-* [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
+- [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
-* [Setting up your development environment](#setting-up-your-development-environment)
+- [Setting up your development environment](#setting-up-your-development-environment)
-* [Prerequisites](#prerequisites)
+- [Prerequisites](#prerequisites)
-* [Docker](#docker)
+- [Docker](#docker)
-* [Docker compose](#docker-compose)
+- [Docker compose](#docker-compose)
-* [Frontend Development](#frontend-development)
+- [Frontend Development](#frontend-development)
-* [Build and Run the Development Environment](#build-and-run-the-development-environment)
+- [Build and Run the Development Environment](#build-and-run-the-development-environment)
-* [Fork and clone the AWX repo](#fork-and-clone-the-awx-repo)
+- [Fork and clone the AWX repo](#fork-and-clone-the-awx-repo)
-* [Building API Documentation](#building-api-documentation)
+- [Building API Documentation](#building-api-documentation)
-* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
+- [Accessing the AWX web interface](#accessing-the-awx-web-interface)
-* [Purging containers and images](#purging-containers-and-images)
+- [Purging containers and images](#purging-containers-and-images)
-* [What should I work on?](#what-should-i-work-on)
+- [What should I work on?](#what-should-i-work-on)
-* [Submitting Pull Requests](#submitting-pull-requests)
+- [Submitting Pull Requests](#submitting-pull-requests)
-* [PR Checks run by Zuul](#pr-checks-run-by-zuul)
+- [PR Checks run by Zuul](#pr-checks-run-by-zuul)
-* [Reporting Issues](#reporting-issues)
+- [Reporting Issues](#reporting-issues)

 ## Things to know prior to submitting code

@@ -46,15 +46,15 @@ respectively.

 For Linux platforms, refer to the following from Docker:

-* **Fedora** - https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
+- **Fedora** - https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
-* **CentOS** - https://docs.docker.com/engine/installation/linux/docker-ce/centos/
+- **CentOS** - https://docs.docker.com/engine/installation/linux/docker-ce/centos/
-* **Ubuntu** - https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
+- **Ubuntu** - https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
-* **Debian** - https://docs.docker.com/engine/installation/linux/docker-ce/debian/
+- **Debian** - https://docs.docker.com/engine/installation/linux/docker-ce/debian/
-* **Arch** - https://wiki.archlinux.org/index.php/Docker
+- **Arch** - https://wiki.archlinux.org/index.php/Docker

 #### Docker Compose

@@ -66,7 +66,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo

 #### Frontend Development

-See [the ui development documentation](awx/ui_next/CONTRIBUTING.md).
+See [the ui development documentation](awx/ui/CONTRIBUTING.md).

 #### Fork and clone the AWX repo

@@ -74,19 +74,19 @@ If you have not done so already, you'll need to fork the AWX repo on GitHub. For

 ### Build and Run the Development Environment

 See the [README.md](./tools/docker-compose/README.md) for docs on how to build the awx_devel image and run the development environment.

 ### Building API Documentation

 AWX includes support for building [Swagger/OpenAPI
 documentation](https://swagger.io). To build the documentation locally, run:

 ```bash
 (container)/awx_devel$ make swagger
 ```

 This will write a file named `swagger.json` that contains the API specification
 in OpenAPI format. A variety of online tools are available for translating
 this data into more consumable formats (such as HTML). http://editor.swagger.io
 is an example of one such service.
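To sanity-check the generated `swagger.json` before handing it to an online renderer, a few lines of Python are enough. A minimal sketch, assuming `make swagger` was already run in the current directory; the `info` and `paths` keys are standard OpenAPI fields, not AWX-specific:

```python
# Sanity-check a generated swagger.json before rendering it elsewhere.
import json

with open("swagger.json") as f:
    spec = json.load(f)

# Standard OpenAPI keys: "swagger" (2.0) or "openapi" (3.x) carries the version.
version = spec.get("openapi") or spec.get("swagger", "unknown")
title = spec.get("info", {}).get("title", "untitled")
paths = spec.get("paths", {})

print(f"{title}: OpenAPI {version}, {len(paths)} documented paths")
for path in sorted(paths)[:5]:
    print("  ", path, "->", ", ".join(m.upper() for m in paths[path]))
```
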
@@ -110,7 +110,7 @@ For feature work, take a look at the current [Enhancements](https://github.com/a

 If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.

-Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](https://github.com/ansible/awx/blob/devel/docs/debugging.md).
+Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](./docs/debugging/).

 **NOTE**

@@ -126,15 +126,15 @@ Fixes and Features for AWX will go through the Github pull request process. Subm

 Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:

-* No issues when running linters/code checkers
+- No issues when running linters/code checkers
-* Python: black: `(container)/awx_devel$ make black`
+- Python: black: `(container)/awx_devel$ make black`
-* Javascript: JsHint: `(container)/awx_devel$ make jshint`
+- Javascript: `(container)/awx_devel$ make ui-lint`
-* No issues from unit tests
+- No issues from unit tests
-* Python: py.test: `(container)/awx_devel$ make test`
+- Python: py.test: `(container)/awx_devel$ make test`
-* JavaScript: Jasmine: `(container)/awx_devel$ make ui-test-ci`
+- JavaScript: `(container)/awx_devel$ make ui-test`
-* Write tests for new functionality, update/add tests for bug fixes
+- Write tests for new functionality, update/add tests for bug fixes
-* Make the smallest change possible
+- Make the smallest change possible
-* Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
+- Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).

 It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

@@ -146,21 +146,24 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `

 All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.

 ## PR Checks run by Zuul

 Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.

 Zuul runs the following checks that must pass:

-1) `tox-awx-api-lint`
-2) `tox-awx-ui-lint`
-3) `tox-awx-api`
-4) `tox-awx-ui`
-5) `tox-awx-swagger`
+1. `tox-awx-api-lint`
+2. `tox-awx-ui-lint`
+3. `tox-awx-api`
+4. `tox-awx-ui`
+5. `tox-awx-swagger`

 Zuul runs the following checks that are non-voting (they may fail without blocking the PR and serve to inform PR reviewers):

-1) `tox-awx-detect-schema-change`
+1. `tox-awx-detect-schema-change`

 This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
 Reviewers should inspect the `job-output.txt.gz` related to the check if there is a failure (grep for `diff -u -b` to find the beginning of the diff).
 If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
 If not, the schema changes should be fixed, but this decision must be enforced by reviewers.

 ## Reporting Issues
@@ -4,8 +4,8 @@ recursive-include awx *.mo

 recursive-include awx/static *
 recursive-include awx/templates *.html
 recursive-include awx/api/templates *.md *.html
-recursive-include awx/ui_next/build *.html
+recursive-include awx/ui/build *.html
-recursive-include awx/ui_next/build *
+recursive-include awx/ui/build *
 recursive-include awx/playbooks *.yml
 recursive-include awx/lib/site-packages *
 recursive-include awx/plugins *.ps1
Makefile

@@ -1,61 +1,40 @@

-PYTHON ?= python3.8
+PYTHON ?= python3.9
-PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
-SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no
-PACKER ?= packer
-PACKER_BUILD_OPTS ?= -var 'official=$(OFFICIAL)' -var 'aw_repo_url=$(AW_REPO_URL)'
 NODE ?= node
 NPM_BIN ?= npm
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
-DEPS_SCRIPT ?= packaging/bundle/deps.py
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-IMAGE_REPOSITORY_AUTH ?=
-IMAGE_REPOSITORY_BASE ?= https://gcr.io
-VERSION := $(shell cat VERSION)
+VERSION := $(shell $(PYTHON) setup.py --version)
+COLLECTION_VERSION := $(shell $(PYTHON) setup.py --version | cut -d . -f 1-3)

 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
-COMPOSE_HOST ?= $(shell hostname)
+MAIN_NODE_TYPE ?= hybrid
+# If set to true docker-compose will also start a keycloak instance
+KEYCLOAK ?= false

-VENV_BASE ?= /var/lib/awx/venv/
+VENV_BASE ?= /var/lib/awx/venv
-SCL_PREFIX ?=
-CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

 DEV_DOCKER_TAG_BASE ?= quay.io/awx
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)

+RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel

 # Python packages to install only from source (not from binary wheels)
 # Comma separated list
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0 wheel==0.36.2
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 wheel==0.36.2

-# Determine appropriate shasum command
-UNAME_S := $(shell uname -s)
-ifeq ($(UNAME_S),Linux)
-SHASUM_BIN ?= sha256sum
-endif
-ifeq ($(UNAME_S),Darwin)
-SHASUM_BIN ?= shasum -a 256
-endif

-# Get the branch information from git
-GIT_DATE := $(shell git log -n 1 --format="%ai")
-DATE := $(shell date -u +%Y%m%d%H%M)

 NAME ?= awx
-GIT_REMOTE_URL = $(shell git config --get remote.origin.url)

 # TAR build parameters
 SDIST_TAR_NAME=$(NAME)-$(VERSION)
-WHEEL_NAME=$(NAME)-$(VERSION)

 SDIST_COMMAND ?= sdist
-WHEEL_COMMAND ?= bdist_wheel
 SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
-WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl

 I18N_FLAG_FILE = .i18n_built

@@ -64,7 +43,7 @@ I18N_FLAG_FILE = .i18n_built
 	receiver test test_unit test_coverage coverage_html \
 	dev_build release_build sdist \
 	ui-release ui-devel \
-	VERSION docker-compose-sources \
+	VERSION PYTHON_VERSION docker-compose-sources \
 	.git/hooks/pre-commit

 clean-tmp:

@@ -83,7 +62,7 @@ clean-schema:

 clean-languages:
 	rm -f $(I18N_FLAG_FILE)
-	find . -type f -regex ".*\.mo$$" -delete
+	find ./awx/locale/ -type f -regex ".*\.mo$$" -delete

 # Remove temporary build files, compiled Python files.
 clean: clean-ui clean-api clean-awxkit clean-dist

@@ -166,15 +145,6 @@ version_file:
 	fi; \
 	$(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

-# Do any one-time init tasks.
-comma := ,
-init:
-	if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/awx/bin/activate; \
-	fi; \
-	$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
-	$(MANAGEMENT_COMMAND) register_queue --queuename=controlplane --instance_percent=100;

 # Refresh development environment after pulling new code.
 refresh: clean requirements_dev version_file develop migrate

@@ -295,17 +265,16 @@ api-lint:

 awx-link:
 	[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/setup.py egg_info_dev
-	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
+	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/$(PYTHON)/site-packages/awx.egg-link

 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
+PYTEST_ARGS ?= -n auto

 # Run all API unit tests.
 test:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider -n auto $(TEST_DIRS)
+	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PYTEST_ARGS) $(TEST_DIRS)
-	cmp VERSION awxkit/VERSION || "VERSION and awxkit/VERSION *must* match"
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'

@@ -337,12 +306,16 @@ symlink_collection:
 	ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)

 build_collection:
-	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION) -e '{"awx_template_version":false}'
+	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+		-e collection_package=$(COLLECTION_PACKAGE) \
+		-e collection_namespace=$(COLLECTION_NAMESPACE) \
+		-e collection_version=$(COLLECTION_VERSION) \
+		-e '{"awx_template_version":false}'
 	ansible-galaxy collection build awx_collection_build --force --output-path=awx_collection_build

 install_collection: build_collection
 	rm -rf $(COLLECTION_INSTALL)
-	ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(VERSION).tar.gz
+	ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz

 test_collection_sanity: install_collection
 	cd $(COLLECTION_INSTALL) && ansible-test sanity

@@ -383,45 +356,51 @@ bulk_data:
 # UI TASKS
 # --------------------------------------

-UI_BUILD_FLAG_FILE = awx/ui_next/.ui-built
+UI_BUILD_FLAG_FILE = awx/ui/.ui-built

 clean-ui:
 	rm -rf node_modules
-	rm -rf awx/ui_next/node_modules
+	rm -rf awx/ui/node_modules
-	rm -rf awx/ui_next/build
+	rm -rf awx/ui/build
-	rm -rf awx/ui_next/src/locales/_build
+	rm -rf awx/ui/src/locales/_build
 	rm -rf $(UI_BUILD_FLAG_FILE)

-awx/ui_next/node_modules:
+awx/ui/node_modules:
-	NODE_OPTIONS=--max-old-space-size=4096 $(NPM_BIN) --prefix awx/ui_next --loglevel warn ci
+	NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn ci

-$(UI_BUILD_FLAG_FILE):
+$(UI_BUILD_FLAG_FILE): awx/ui/node_modules
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run build
+	$(PYTHON) tools/scripts/compilemessages.py
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run build
 	mkdir -p awx/public/static/css
 	mkdir -p awx/public/static/js
 	mkdir -p awx/public/static/media
-	cp -r awx/ui_next/build/static/css/* awx/public/static/css
+	cp -r awx/ui/build/static/css/* awx/public/static/css
-	cp -r awx/ui_next/build/static/js/* awx/public/static/js
+	cp -r awx/ui/build/static/js/* awx/public/static/js
-	cp -r awx/ui_next/build/static/media/* awx/public/static/media
+	cp -r awx/ui/build/static/media/* awx/public/static/media
 	touch $@

-ui-release: awx/ui_next/node_modules $(UI_BUILD_FLAG_FILE)
+ui-release: $(UI_BUILD_FLAG_FILE)

-ui-devel: awx/ui_next/node_modules
+ui-devel: awx/ui/node_modules
 	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)

-ui-devel-instrumented: awx/ui_next/node_modules
+ui-devel-instrumented: awx/ui/node_modules
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run start-instrumented
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented

-ui-devel-test: awx/ui_next/node_modules
+ui-devel-test: awx/ui/node_modules
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run start
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run start

-ui-zuul-lint-and-test:
+ui-lint:
-	$(NPM_BIN) --prefix awx/ui_next install
+	$(NPM_BIN) --prefix awx/ui install
-	$(NPM_BIN) run --prefix awx/ui_next lint
+	$(NPM_BIN) run --prefix awx/ui lint
-	$(NPM_BIN) run --prefix awx/ui_next prettier-check
+	$(NPM_BIN) run --prefix awx/ui prettier-check
-	$(NPM_BIN) run --prefix awx/ui_next test -- --coverage --watchAll=false
+
+ui-test:
+	$(NPM_BIN) --prefix awx/ui install
+	$(NPM_BIN) run --prefix awx/ui test

 # Build a pip-installable package into dist/ with a timestamped version number.

@@ -432,33 +411,22 @@ dev_build:
 release_build:
 	$(PYTHON) setup.py release_build

-dist/$(SDIST_TAR_FILE): ui-release VERSION
+HEADLESS ?= no
+ifeq ($(HEADLESS), yes)
+dist/$(SDIST_TAR_FILE):
+else
+dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE)
+endif
 	$(PYTHON) setup.py $(SDIST_COMMAND)
+	ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz

-dist/$(WHEEL_FILE): ui-release
-	$(PYTHON) setup.py $(WHEEL_COMMAND)

 sdist: dist/$(SDIST_TAR_FILE)
+	echo $(HEADLESS)
 	@echo "#############################################"
 	@echo "Artifacts:"
 	@echo dist/$(SDIST_TAR_FILE)
 	@echo "#############################################"

-wheel: dist/$(WHEEL_FILE)
-	@echo "#############################################"
-	@echo "Artifacts:"
-	@echo dist/$(WHEEL_FILE)
-	@echo "#############################################"

-# Build setup bundle tarball
-setup-bundle-build:
-	mkdir -p $@

-docker-auth:
-	@if [ "$(IMAGE_REPOSITORY_AUTH)" ]; then \
-		echo "$(IMAGE_REPOSITORY_AUTH)" | docker login -u oauth2accesstoken --password-stdin $(IMAGE_REPOSITORY_BASE); \
-	fi;

 # This directory is bind-mounted inside of the development container and
 # needs to be pre-created for permissions to be set correctly. Otherwise,
 # Docker will create this directory as root.

@@ -466,22 +434,34 @@ awx/projects:
 	@mkdir -p $@

 COMPOSE_UP_OPTS ?=
-CLUSTER_NODE_COUNT ?= 1
+COMPOSE_OPTS ?=
+CONTROL_PLANE_NODE_COUNT ?= 1
+EXECUTION_NODE_COUNT ?= 2
+MINIKUBE_CONTAINER_GROUP ?= false

 docker-compose-sources: .git/hooks/pre-commit
+	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
+		ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+	fi;
 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 		-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 		-e awx_image_tag=$(COMPOSE_TAG) \
-		-e cluster_node_count=$(CLUSTER_NODE_COUNT)
+		-e receptor_image=$(RECEPTOR_IMAGE) \
+		-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
+		-e execution_node_count=$(EXECUTION_NODE_COUNT) \
+		-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
+		-e enable_keycloak=$(KEYCLOAK)

-docker-compose: docker-auth awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_UP_OPTS) up
+docker-compose: awx/projects docker-compose-sources
+	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

-docker-compose-credential-plugins: docker-auth awx/projects docker-compose-sources
+docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1
+	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans

-docker-compose-test: docker-auth awx/projects docker-compose-sources
+docker-compose-test: awx/projects docker-compose-sources
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash

 docker-compose-runtest: awx/projects docker-compose-sources

@@ -490,40 +470,52 @@ docker-compose-runtest: awx/projects docker-compose-sources
 docker-compose-build-swagger: awx/projects docker-compose-sources
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

+SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
-	curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
+	curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
 	# Ignore differences in whitespace with -b
 	diff -u -b reference-schema.json schema.json
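The `detect-schema-change` target leans on `curl` and `diff -u -b`; the same whitespace-insensitive comparison can be reproduced in Python when `diff` is not available. A rough sketch, assuming `reference-schema.json` and `schema.json` are already present (e.g., after running `make genschema` and the `curl` step above):

```python
# Whitespace-insensitive unified diff of two schema files,
# mirroring `diff -u -b reference-schema.json schema.json` (sketch).
import difflib

def normalized(path):
    with open(path) as f:
        # Collapse runs of whitespace on each line, as diff -b does.
        return [" ".join(line.split()) + "\n" for line in f]

diff = difflib.unified_diff(
    normalized("reference-schema.json"),
    normalized("schema.json"),
    fromfile="reference-schema.json",
    tofile="schema.json",
)
print("".join(diff) or "no schema change detected")
```
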
 docker-compose-clean: awx/projects
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf

+docker-compose-container-group-clean:
+	@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
+		tools/docker-compose-minikube/_sources/minikube delete; \
+	fi
+	rm -rf tools/docker-compose-minikube/_sources/

 # Base development image build
 docker-compose-build:
-	ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True
+	ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
 	DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .

 docker-clean:
-	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq),docker stop $(container_id); docker rm -f $(container_id);)
+	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
-	docker images | grep "awx_devel" | awk '{print $$1 ":" $$2}' | xargs docker rmi
+	if [ "$(shell docker images | grep awx_devel)" ]; then \
+		docker images | grep awx_devel | awk '{print $$3}' | xargs docker rmi --force; \
+	fi

-docker-clean-volumes: docker-compose-clean
+docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
 	docker volume rm tools_awx_db

 docker-refresh: docker-clean docker-compose

 # Docker Development Environment with Elastic Stack Connected
-docker-compose-elk: docker-auth awx/projects docker-compose-sources
+docker-compose-elk: awx/projects docker-compose-sources
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

-docker-compose-cluster-elk: docker-auth awx/projects docker-compose-sources
+docker-compose-cluster-elk: awx/projects docker-compose-sources
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

 prometheus:
 	docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml

+docker-compose-container-group:
+	MINIKUBE_CONTAINER_GROUP=true make docker-compose

 clean-elk:
 	docker stop tools_kibana_1
 	docker stop tools_logstash_1

@@ -538,14 +530,18 @@ psql-container:
 VERSION:
 	@echo "awx: $(VERSION)"

+PYTHON_VERSION:
+	@echo "$(PYTHON)" | sed 's:python::'

 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml
+	ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)

 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 	ansible-playbook tools/ansible/dockerfile.yml \
 		-e dockerfile_name=Dockerfile.kube-dev \
 		-e kube_dev=True \
-		-e template_dest=_build_kube_dev
+		-e template_dest=_build_kube_dev \
+		-e receptor_image=$(RECEPTOR_IMAGE)

 awx-kube-dev-build: Dockerfile.kube-dev
 	docker build -f Dockerfile.kube-dev \

@@ -558,11 +554,11 @@ awx-kube-dev-build: Dockerfile.kube-dev

 # generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean

 # generate UI .po files for each locale (will update translated strings for `en`)
 po: $(UI_BUILD_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean

 # generate API django .pot .po
 LANG = "en-us"
@@ -1,4 +1,4 @@

-[](https://ansible.softwarefactory-project.io/zuul/status) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
+[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
 [](https://libera.chat)

 <img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
@@ -151,7 +151,7 @@ def manage():
     from django.core.management import execute_from_command_line

     # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
-    if not MODE == 'development':
+    if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
             sys.stderr.write("Postgres version 12 is required\n")
             sys.exit(1)
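The version gate above works because Django reports the Postgres server version as a single integer (major * 10000 + minor, following the libpq convention), so integer division by 10000 yields the major version. A small worked illustration with made-up version integers:

```python
# How the guard above derives the Postgres major version (illustration only;
# the raw integers follow the Django/libpq convention: major * 10000 + minor).
for pg_version in (90624, 100019, 120007, 140001):
    major = pg_version // 10000
    print(pg_version, "->", major, "OK" if major >= 12 else "rejected")
# 90624 -> 9 rejected, 100019 -> 10 rejected, 120007 -> 12 OK, 140001 -> 14 OK
```
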
@@ -44,6 +44,7 @@ from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
 from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
+from awx.conf import settings_registry

 __all__ = [
     'APIView',

@@ -208,19 +209,35 @@ class APIView(views.APIView):
             return response

         if response.status_code >= 400:
-            status_msg = "status %s received by user %s attempting to access %s from %s" % (
-                response.status_code,
-                request.user,
-                request.path,
-                request.META.get('REMOTE_ADDR', None),
-            )
+            msg_data = {
+                'status_code': response.status_code,
+                'user_name': request.user,
+                'url_path': request.path,
+                'remote_addr': request.META.get('REMOTE_ADDR', None),
+            }
+
+            if type(response.data) is dict:
+                msg_data['error'] = response.data.get('error', response.status_text)
+            elif type(response.data) is list:
+                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+            else:
+                msg_data['error'] = response.status_text
+
+            try:
+                status_msg = getattr(settings, 'API_400_ERROR_LOG_FORMAT').format(**msg_data)
+            except Exception as e:
+                if getattr(settings, 'API_400_ERROR_LOG_FORMAT', None):
+                    logger.error("Unable to format API_400_ERROR_LOG_FORMAT setting, defaulting log message: {}".format(e))
+                status_msg = settings_registry.get_setting_field('API_400_ERROR_LOG_FORMAT').get_default().format(**msg_data)
+
             if hasattr(self, '__init_request_error__'):
                 response = self.handle_exception(self.__init_request_error__)
             if response.status_code == 401:
-                response.data['detail'] += ' To establish a login session, visit /api/login/.'
+                response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
                 logger.info(status_msg)
             else:
                 logger.warning(status_msg)

         response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
         time_started = getattr(self, 'time_started', None)
         response['X-API-Product-Version'] = get_awx_version()
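The template-driven logging above means the shape of the log line is controlled by the `API_400_ERROR_LOG_FORMAT` setting via `str.format`. A hedged sketch of formatting one message: the template string below is modeled on the old hard-coded message and is illustrative only (the setting's real default is not shown in this diff), while the keys mirror the `msg_data` dict built above:

```python
# Illustrative only: formatting a 4xx log line the way finalize_response() does.
# The template value is an assumption; the keys come from msg_data in the diff above.
API_400_ERROR_LOG_FORMAT = (
    "status {status_code} received by user {user_name} "
    "attempting to access {url_path} from {remote_addr}: {error}"
)

msg_data = {
    'status_code': 403,
    'user_name': 'alice',
    'url_path': '/api/v2/inventories/42/',
    'remote_addr': '203.0.113.7',
    'error': 'You do not have permission to perform this action.',
}
print(API_400_ERROR_LOG_FORMAT.format(**msg_data))
```
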
@@ -817,7 +834,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):


 def trigger_delayed_deep_copy(*args, **kwargs):
-    from awx.main.tasks import deep_copy_model_obj
+    from awx.main.tasks.system import deep_copy_model_obj

     connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
@@ -25,7 +25,7 @@ __all__ = [
     'ProjectUpdatePermission',
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
-    'IsSuperUser',
+    'IsSystemAdminOrAuditor',
     'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]

@@ -236,13 +236,18 @@ class UserPermission(ModelAccessPermission):
         raise PermissionDenied()


-class IsSuperUser(permissions.BasePermission):
+class IsSystemAdminOrAuditor(permissions.BasePermission):
     """
-    Allows access only to admin users.
+    Allows write access only to system admin users.
+    Allows read access only to system auditor users.
     """

     def has_permission(self, request, view):
-        return request.user and request.user.is_superuser
+        if not (request.user and request.user.is_authenticated):
+            return False
+        if request.method == 'GET':
+            return request.user.is_superuser or request.user.is_system_auditor
+        return request.user.is_superuser


 class InstanceGroupTowerPermission(ModelAccessPermission):
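Because `IsSystemAdminOrAuditor` is a plain DRF `BasePermission`, adopting it on a view is a one-line change. A minimal sketch under standard DRF assumptions; the view class here is hypothetical and not part of this diff:

```python
# Hypothetical usage sketch; the view itself is illustrative and not in AWX.
from rest_framework.views import APIView
from rest_framework.response import Response

from awx.api.permissions import IsSystemAdminOrAuditor

class ExampleControlPlaneView(APIView):
    permission_classes = (IsSystemAdminOrAuditor,)

    def get(self, request):
        # Reachable by superusers and system auditors.
        return Response({"ok": True})

    def post(self, request):
        # Reachable by superusers only; auditors fail has_permission().
        return Response({"ok": True})
```
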
@@ -57,6 +57,7 @@ from awx.main.models import (
     Host,
     Instance,
     InstanceGroup,
+    InstanceLink,
     Inventory,
     InventorySource,
     InventoryUpdate,

@@ -378,19 +379,22 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
     def _get_related(self, obj):
         return {} if obj is None else self.get_related(obj)

-    def _generate_named_url(self, url_path, obj, node):
-        url_units = url_path.split('/')
+    def _generate_friendly_id(self, obj, node):
         reset_counters()
-        named_url = node.generate_named_url(obj)
-        url_units[4] = named_url
-        return '/'.join(url_units)
+        return node.generate_named_url(obj)

     def get_related(self, obj):
         res = OrderedDict()
         view = self.context.get('view', None)
         if view and (hasattr(view, 'retrieve') or view.request.method == 'POST') and type(obj) in settings.NAMED_URL_GRAPH:
-            original_url = self.get_url(obj)
-            res['named_url'] = self._generate_named_url(original_url, obj, settings.NAMED_URL_GRAPH[type(obj)])
+            original_path = self.get_url(obj)
+            path_components = original_path.lstrip('/').rstrip('/').split('/')
+
+            friendly_id = self._generate_friendly_id(obj, settings.NAMED_URL_GRAPH[type(obj)])
+            path_components[-1] = friendly_id
+
+            new_path = '/' + '/'.join(path_components) + '/'
+            res['named_url'] = new_path
         if getattr(obj, 'created_by', None):
             res['created_by'] = self.reverse('api:user_detail', kwargs={'pk': obj.created_by.pk})
         if getattr(obj, 'modified_by', None):
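The refactor above replaces the brittle `url_units[4]` index with a swap of the last path component, so named URLs survive at any path depth. The string manipulation in isolation, with a made-up path and friendly id:

```python
# Standalone illustration of the named-URL path rewrite (values are made up).
def rewrite_named_url(original_path: str, friendly_id: str) -> str:
    path_components = original_path.lstrip('/').rstrip('/').split('/')
    path_components[-1] = friendly_id  # replace the trailing pk
    return '/' + '/'.join(path_components) + '/'

print(rewrite_named_url('/api/v2/organizations/7/', 'Default'))
# -> /api/v2/organizations/Default/
```
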
@@ -861,7 +865,7 @@ class UnifiedJobSerializer(BaseSerializer):
         if 'elapsed' in ret:
             if obj and obj.pk and obj.started and not obj.finished:
                 td = now() - obj.started
-                ret['elapsed'] = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / (10 ** 6 * 1.0)
+                ret['elapsed'] = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
             ret['elapsed'] = float(ret['elapsed'])
         # Because this string is saved in the db in the source language,
         # it must be marked for translation after it is pulled from the db, not when set

@@ -948,7 +952,6 @@ class UserSerializer(BaseSerializer):
             '*',
             '-name',
             '-description',
-            '-modified',
             'username',
             'first_name',
             'last_name',

@@ -1640,7 +1643,25 @@ class BaseSerializerWithVariables(BaseSerializer):
         return vars_validate_or_raise(value)


-class InventorySerializer(BaseSerializerWithVariables):
+class LabelsListMixin(object):
+    def _summary_field_labels(self, obj):
+        label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all()[:10]]
+        if has_model_field_prefetched(obj, 'labels'):
+            label_ct = len(obj.labels.all())
+        else:
+            if len(label_list) < 10:
+                label_ct = len(label_list)
+            else:
+                label_ct = obj.labels.count()
+        return {'count': label_ct, 'results': label_list}
+
+    def get_summary_fields(self, obj):
+        res = super(LabelsListMixin, self).get_summary_fields(obj)
+        res['labels'] = self._summary_field_labels(obj)
+        return res
+
+
+class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
     show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
     capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]

@@ -1681,6 +1702,7 @@ class InventorySerializer(BaseSerializerWithVariables):
                 object_roles=self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
                 instance_groups=self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
                 copy=self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
+                labels=self.reverse('api:inventory_label_list', kwargs={'pk': obj.pk}),
             )
         )
         if obj.organization:

@@ -2750,24 +2772,6 @@ class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
         fields = ('*', '-user', '-team')


-class LabelsListMixin(object):
-    def _summary_field_labels(self, obj):
-        label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all()[:10]]
-        if has_model_field_prefetched(obj, 'labels'):
-            label_ct = len(obj.labels.all())
-        else:
-            if len(label_list) < 10:
-                label_ct = len(label_list)
-            else:
-                label_ct = obj.labels.count()
-        return {'count': label_ct, 'results': label_list}
-
-    def get_summary_fields(self, obj):
-        res = super(LabelsListMixin, self).get_summary_fields(obj)
-        res['labels'] = self._summary_field_labels(obj)
-        return res
-
-
 class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
     class Meta:
         fields = (

@@ -4768,6 +4772,28 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
         return super(ScheduleSerializer, self).validate(attrs)


+class InstanceLinkSerializer(BaseSerializer):
+    class Meta:
+        model = InstanceLink
+        fields = ('source', 'target')
+
+    source = serializers.SlugRelatedField(slug_field="hostname", read_only=True)
+    target = serializers.SlugRelatedField(slug_field="hostname", read_only=True)
+
+
+class InstanceNodeSerializer(BaseSerializer):
+    class Meta:
+        model = Instance
+        fields = ('id', 'hostname', 'node_type', 'node_state')
+
+    node_state = serializers.SerializerMethodField()
+
+    def get_node_state(self, obj):
+        if not obj.enabled:
+            return "disabled"
+        return "error" if obj.errors else "healthy"
+
+
 class InstanceSerializer(BaseSerializer):

     consumed_capacity = serializers.SerializerMethodField()

@@ -4777,7 +4803,7 @@ class InstanceSerializer(BaseSerializer):

     class Meta:
         model = Instance
-        read_only_fields = ('uuid', 'hostname', 'version')
+        read_only_fields = ('uuid', 'hostname', 'version', 'node_type')
         fields = (
             "id",
             "type",

@@ -4787,6 +4813,9 @@ class InstanceSerializer(BaseSerializer):
             "hostname",
             "created",
             "modified",
+            "last_seen",
+            "last_health_check",
+            "errors",
             'capacity_adjustment',
             "version",
             "capacity",

@@ -4800,12 +4829,16 @@ class InstanceSerializer(BaseSerializer):
             "mem_capacity",
             "enabled",
             "managed_by_policy",
+            "node_type",
         )

     def get_related(self, obj):
         res = super(InstanceSerializer, self).get_related(obj)
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
+        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
+            if obj.node_type != 'hop':
+                res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
         return res

     def get_consumed_capacity(self, obj):

@@ -4818,6 +4851,13 @@ class InstanceSerializer(BaseSerializer):
         return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100))


+class InstanceHealthCheckSerializer(BaseSerializer):
+    class Meta:
+        model = Instance
+        read_only_fields = ('uuid', 'hostname', 'version', 'last_health_check', 'errors', 'cpu', 'memory', 'cpu_capacity', 'mem_capacity', 'capacity')
+        fields = read_only_fields
+
+
 class InstanceGroupSerializer(BaseSerializer):

     show_capabilities = ['edit', 'delete']

@@ -4991,6 +5031,7 @@ class ActivityStreamSerializer(BaseSerializer):
             ('credential_type', ('id', 'name', 'description', 'kind', 'managed')),
             ('ad_hoc_command', ('id', 'name', 'status', 'limit')),
             ('workflow_approval', ('id', 'name', 'unified_job_id')),
+            ('instance', ('id', 'hostname')),
         ]
         return field_list
awx/api/templates/api/instance_health_check.md (new file)

@@ -0,0 +1,33 @@
+{% ifmeth GET %}
+# Health Check Data
+
+Health checks are used to obtain important data about an instance.
+Instance fields affected by the health check are shown in this view.
+Fundamentally, health checks require running code on the machine in question.
+
+- For instances with `node_type` of "control" or "hybrid", health checks are
+  performed as part of a periodic task that runs in the background.
+- For instances with `node_type` of "execution", health checks are done by submitting
+  a work unit through the receptor mesh.
+
+If run through the receptor mesh, the invoked command is:
+
+```
+ansible-runner worker --worker-info
+```
+
+For execution nodes, these checks are _not_ performed on a regular basis.
+Health checks against functional nodes will be run when the node is first discovered.
+Health checks against nodes with errors will be repeated at a reduced frequency.
+
+{% endifmeth %}
+
+{% ifmeth POST %}
+# Manually Initiate a Health Check
+
+For purposes of error remediation or debugging, a health check can be
+manually initiated by making a POST request to this endpoint.
+
+This will submit the work unit to the target node through the receptor mesh and wait for it to finish.
+The model will be updated with the result.
+Up-to-date values of the fields will be returned in the response data.
+{% endifmeth %}
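Putting the POST behavior above together with the `instances/<pk>/health_check/` route added later in this changeset, a manual health check can be triggered with any HTTP client. A rough sketch using `requests`; the host, credentials, and instance id are placeholders:

```python
# Sketch: manually trigger a health check on instance 3 (placeholders throughout).
import requests

resp = requests.post(
    "https://awx.example.com/api/v2/instances/3/health_check/",
    auth=("admin", "password"),  # placeholder credentials
)
resp.raise_for_status()
data = resp.json()
# The response carries the refreshed health fields from the serializer:
print(data.get("last_health_check"), data.get("errors"), data.get("capacity"))
```
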
awx/api/templates/api/mesh_visualizer.md (new file)

@@ -0,0 +1 @@
+Make a GET request to this resource to obtain a list of all Receptor Nodes and their links.
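The mesh topology documented above is served from the `mesh_visualizer/` route registered later in this changeset. A matching sketch, with the same placeholder host and credentials as before:

```python
# Sketch: fetch the receptor mesh topology (placeholders as before).
import requests

resp = requests.get(
    "https://awx.example.com/api/v2/mesh_visualizer/",
    auth=("admin", "password"),
)
resp.raise_for_status()
topology = resp.json()
# Expect collections of receptor nodes and the links between them.
print(sorted(topology.keys()))
```
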
@@ -3,11 +3,10 @@

 from django.conf.urls import url

-from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail
+from awx.api.views import AdHocCommandEventDetail


 urls = [
-    url(r'^$', AdHocCommandEventList.as_view(), name='ad_hoc_command_event_list'),
     url(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
 ]
@@ -3,7 +3,7 @@

 from django.conf.urls import url

-from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList
+from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck


 urls = [

@@ -11,6 +11,7 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
     url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
     url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
+    url(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
 ]

 __all__ = ['urls']
@@ -20,6 +20,7 @@ from awx.api.views import (
     InventoryAccessList,
     InventoryObjectRolesList,
     InventoryInstanceGroupsList,
+    InventoryLabelList,
     InventoryCopy,
 )

@@ -41,6 +42,7 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
     url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
     url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
+    url(r'^(?P<pk>[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'),
     url(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
 ]
@@ -28,6 +28,7 @@ from awx.api.views import (
     OAuth2TokenList,
     ApplicationOAuth2TokenList,
     OAuth2ApplicationDetail,
+    MeshVisualizer,
 )

 from awx.api.views.metrics import MetricsView
@@ -95,6 +96,7 @@ v2_urls = [
     url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
     url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
     url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
+    url(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'),
     url(r'^settings/', include('awx.conf.urls')),
     url(r'^instances/', include(instance_urls)),
     url(r'^instance_groups/', include(instance_group_urls)),
@@ -62,7 +62,7 @@ import pytz
 from wsgiref.util import FileWrapper

 # AWX
-from awx.main.tasks import send_notifications, update_inventory_computed_fields
+from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
 from awx.main.access import get_user_queryset, HostAccess
 from awx.api.generics import (
     APIView,
@@ -108,6 +108,7 @@ from awx.api.permissions import (
     InstanceGroupTowerPermission,
     VariableDataPermission,
     WorkflowApprovalPermission,
+    IsSystemAdminOrAuditor,
 )
 from awx.api import renderers
 from awx.api import serializers
@@ -156,8 +157,10 @@ from awx.api.views.inventory import ( # noqa
     InventoryAccessList,
     InventoryObjectRolesList,
     InventoryJobTemplateList,
+    InventoryLabelList,
     InventoryCopy,
 )
+from awx.api.views.mesh_visualizer import MeshVisualizer # noqa
 from awx.api.views.root import ( # noqa
     ApiRootView,
     ApiOAuthAuthorizationRootView,
@@ -374,8 +377,8 @@ class InstanceDetail(RetrieveUpdateAPIView):
         r = super(InstanceDetail, self).update(request, *args, **kwargs)
         if status.is_success(r.status_code):
             obj = self.get_object()
-            obj.refresh_capacity()
-            obj.save()
+            obj.set_capacity_value()
+            obj.save(update_fields=['capacity'])
             r.data = serializers.InstanceSerializer(obj, context=self.get_serializer_context()).to_representation(obj)
         return r

@@ -402,6 +405,67 @@ class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAttachDetachAPIView):
     parent_model = models.Instance
     relationship = 'rampart_groups'

+    def is_valid_relation(self, parent, sub, created=False):
+        if parent.node_type == 'control':
+            return {'msg': _(f"Cannot change instance group membership of control-only node: {parent.hostname}.")}
+        if parent.node_type == 'hop':
+            return {'msg': _(f"Cannot change instance group membership of hop node: {parent.hostname}.")}
+        return None
+
+
+class InstanceHealthCheck(GenericAPIView):
+
+    name = _('Instance Health Check')
+    model = models.Instance
+    serializer_class = serializers.InstanceHealthCheckSerializer
+    permission_classes = (IsSystemAdminOrAuditor,)
+
+    def get_queryset(self):
+        # FIXME: For now, we don't have a good way of checking the health of a hop node.
+        return super().get_queryset().exclude(node_type='hop')
+
+    def get(self, request, *args, **kwargs):
+        obj = self.get_object()
+        data = self.get_serializer(data=request.data).to_representation(obj)
+        return Response(data, status=status.HTTP_200_OK)
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        if obj.node_type == 'execution':
+            from awx.main.tasks.system import execution_node_health_check
+
+            runner_data = execution_node_health_check(obj.hostname)
+            obj.refresh_from_db()
+            data = self.get_serializer(data=request.data).to_representation(obj)
+            # Add in some extra unsaved fields
+            for extra_field in ('transmit_timing', 'run_timing'):
+                if extra_field in runner_data:
+                    data[extra_field] = runner_data[extra_field]
+        else:
+            from awx.main.tasks.system import cluster_node_health_check
+
+            if settings.CLUSTER_HOST_ID == obj.hostname:
+                cluster_node_health_check(obj.hostname)
+            else:
+                cluster_node_health_check.apply_async([obj.hostname], queue=obj.hostname)
+                start_time = time.time()
+                prior_check_time = obj.last_health_check
+                while time.time() - start_time < 50.0:
+                    obj.refresh_from_db(fields=['last_health_check'])
+                    if obj.last_health_check != prior_check_time:
+                        break
+                    if time.time() - start_time < 1.0:
+                        time.sleep(0.1)
+                    else:
+                        time.sleep(1.0)
+                else:
+                    obj.mark_offline(errors=_('Health check initiated by user determined this instance to be unresponsive'))
+            obj.refresh_from_db()
+            data = self.get_serializer(data=request.data).to_representation(obj)
+
+        return Response(data, status=status.HTTP_200_OK)
+
+
 class InstanceGroupList(ListCreateAPIView):

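A hedged sketch of exercising the new health check action from a client; the host, token, and instance id are placeholders. The view's queryset excludes hop nodes, so this only applies to control, hybrid, and execution nodes:

    import requests  # assumed client-side dependency

    resp = requests.post(
        'https://awx.example.com/api/v2/instances/1/health_check/',  # id 1 is a placeholder
        headers={'Authorization': 'Bearer <token>'},  # placeholder token
    )
    # For execution nodes the response may also carry the unsaved
    # 'transmit_timing' and 'run_timing' fields taken from runner data.
    print(resp.status_code, resp.json())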
@@ -444,6 +508,13 @@ class InstanceGroupInstanceList(InstanceGroupMembershipMixin, SubListAttachDetachAPIView):
     relationship = "instances"
     search_fields = ('hostname',)

+    def is_valid_relation(self, parent, sub, created=False):
+        if sub.node_type == 'control':
+            return {'msg': _(f"Cannot change instance group membership of control-only node: {sub.hostname}.")}
+        if sub.node_type == 'hop':
+            return {'msg': _(f"Cannot change instance group membership of hop node: {sub.hostname}.")}
+        return None
+
+
 class ScheduleList(ListCreateAPIView):

@@ -3906,18 +3977,6 @@ class AdHocCommandRelaunch(GenericAPIView):
         return Response(data, status=status.HTTP_201_CREATED, headers=headers)


-class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
-
-    model = models.AdHocCommandEvent
-    serializer_class = serializers.AdHocCommandEventSerializer
-    search_fields = ('stdout',)
-
-    def get_queryset(self):
-        adhoc = self.get_parent_object()
-        self.check_parent_access(adhoc)
-        return adhoc.get_event_queryset()
-
-
 class AdHocCommandEventDetail(RetrieveAPIView):

     model = models.AdHocCommandEvent
@@ -16,17 +16,21 @@ from rest_framework.response import Response
 from rest_framework import status

 # AWX
-from awx.main.models import (
-    ActivityStream,
-    Inventory,
-    JobTemplate,
-    Role,
-    User,
-    InstanceGroup,
-    InventoryUpdateEvent,
-    InventoryUpdate,
-)
-from awx.api.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, SubListAPIView, SubListAttachDetachAPIView, ResourceAccessList, CopyAPIView
+from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
+from awx.main.models.label import Label
+from awx.api.generics import (
+    ListCreateAPIView,
+    RetrieveUpdateDestroyAPIView,
+    SubListAPIView,
+    SubListAttachDetachAPIView,
+    ResourceAccessList,
+    CopyAPIView,
+    DeleteLastUnattachLabelMixin,
+    SubListCreateAttachDetachAPIView,
+)

 from awx.api.serializers import (
     InventorySerializer,
@@ -35,6 +39,7 @@ from awx.api.serializers import (
     InstanceGroupSerializer,
     InventoryUpdateEventSerializer,
     JobTemplateSerializer,
+    LabelSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin

@@ -152,6 +157,30 @@ class InventoryJobTemplateList(SubListAPIView):
         return qs.filter(inventory=parent)


+class InventoryLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView, SubListAPIView):
+
+    model = Label
+    serializer_class = LabelSerializer
+    parent_model = Inventory
+    relationship = 'labels'
+
+    def post(self, request, *args, **kwargs):
+        # If a label already exists in the database, attach it instead of erroring out
+        # that it already exists
+        if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
+            existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
+            if existing.exists():
+                existing = existing[0]
+                request.data['id'] = existing.id
+                del request.data['name']
+                del request.data['organization']
+        if Label.objects.filter(inventory_labels=self.kwargs['pk']).count() > 100:
+            return Response(
+                dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
+            )
+        return super(InventoryLabelList, self).post(request, *args, **kwargs)
+
+
 class InventoryCopy(CopyAPIView):

     model = Inventory
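A hedged sketch of the attach-or-create behavior the new post() implements; host, token, and ids are placeholders:

    import requests  # assumed client-side dependency

    # If a label named 'production' already exists in organization 1, it is
    # attached to the inventory instead of being rejected as a duplicate.
    requests.post(
        'https://awx.example.com/api/v2/inventories/1/labels/',  # placeholder ids
        headers={'Authorization': 'Bearer <token>'},  # placeholder token
        json={'name': 'production', 'organization': 1},
    )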
awx/api/views/mesh_visualizer.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+# Copyright (c) 2018 Red Hat, Inc.
+# All Rights Reserved.
+
+from django.utils.translation import ugettext_lazy as _
+
+from awx.api.generics import APIView, Response
+from awx.api.permissions import IsSystemAdminOrAuditor
+from awx.api.serializers import InstanceLinkSerializer, InstanceNodeSerializer
+from awx.main.models import InstanceLink, Instance
+
+
+class MeshVisualizer(APIView):
+
+    name = _("Mesh Visualizer")
+    permission_classes = (IsSystemAdminOrAuditor,)
+    swagger_topic = "System Configuration"
+
+    def get(self, request, format=None):
+
+        data = {
+            'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
+            'links': InstanceLinkSerializer(InstanceLink.objects.all(), many=True).data,
+        }
+
+        return Response(data)
@@ -68,13 +68,23 @@ class InstanceGroupMembershipMixin(object):
     membership.
     """

+    def attach_validate(self, request):
+        parent = self.get_parent_object()
+        sub_id, res = super().attach_validate(request)
+        if res:  # handle an error
+            return sub_id, res
+        sub = get_object_or_400(self.model, pk=sub_id)
+        attach_errors = self.is_valid_relation(parent, sub)
+        if attach_errors:
+            return sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST)
+        return sub_id, res
+
     def attach(self, request, *args, **kwargs):
         response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
         sub_id, res = self.attach_validate(request)
         if status.is_success(response.status_code):
             if self.parent_model is Instance:
-                ig_obj = get_object_or_400(self.model, pk=sub_id)
-                inst_name = ig_obj.hostname
+                inst_name = self.get_parent_object().hostname
             else:
                 inst_name = get_object_or_400(self.model, pk=sub_id).hostname
             with transaction.atomic():
@@ -91,11 +101,12 @@ class InstanceGroupMembershipMixin(object):
         return response

     def unattach_validate(self, request):
+        parent = self.get_parent_object()
         (sub_id, res) = super(InstanceGroupMembershipMixin, self).unattach_validate(request)
         if res:
             return (sub_id, res)
         sub = get_object_or_400(self.model, pk=sub_id)
-        attach_errors = self.is_valid_relation(None, sub)
+        attach_errors = self.is_valid_relation(parent, sub)
         if attach_errors:
             return (sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST))
         return (sub_id, res)
@@ -123,6 +123,7 @@ class ApiVersionRootView(APIView):
         data['workflow_approvals'] = reverse('api:workflow_approval_list', request=request)
         data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
         data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
+        data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
         return Response(data)


@@ -149,16 +150,24 @@ class ApiV2PingView(APIView):
         response = {'ha': is_ha_environment(), 'version': get_awx_version(), 'active_node': settings.CLUSTER_HOST_ID, 'install_uuid': settings.INSTALL_UUID}

         response['instances'] = []
-        for instance in Instance.objects.all():
+        for instance in Instance.objects.exclude(node_type='hop'):
             response['instances'].append(
-                dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
+                dict(
+                    node=instance.hostname,
+                    node_type=instance.node_type,
+                    uuid=instance.uuid,
+                    heartbeat=instance.last_seen,
+                    capacity=instance.capacity,
+                    version=instance.version,
+                )
             )
-        sorted(response['instances'], key=operator.itemgetter('node'))
+        response['instances'] = sorted(response['instances'], key=operator.itemgetter('node'))
         response['instance_groups'] = []
         for instance_group in InstanceGroup.objects.prefetch_related('instances'):
             response['instance_groups'].append(
                 dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
             )
+        response['instance_groups'] = sorted(response['instance_groups'], key=lambda x: x['name'].lower())
         return Response(response)

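After this change, an entry in the ping response's instances list looks roughly like the following (all values illustrative; hop nodes no longer appear, node_type is new, and heartbeat is now sourced from last_seen rather than modified):

    {
        'node': 'awx-1.example.com',  # illustrative hostname
        'node_type': 'hybrid',
        'uuid': '00000000-0000-0000-0000-000000000000',
        'heartbeat': '2021-11-01T12:00:00Z',
        'capacity': 100,
        'version': '19.5.0',  # illustrative version
    }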
@@ -13,6 +13,9 @@ from django.utils.translation import ugettext_lazy as _
 from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa
 from rest_framework.serializers import PrimaryKeyRelatedField # noqa

+# AWX
+from awx.main.constants import CONTAINER_VOLUMES_MOUNT_TYPES, MAX_ISOLATED_PATH_COLON_DELIMITER
+
 logger = logging.getLogger('awx.conf.fields')

 # Use DRF fields to convert/validate settings:
@@ -109,6 +112,49 @@ class StringListPathField(StringListField):
             self.fail('type_error', input_type=type(paths))


+class StringListIsolatedPathField(StringListField):
+    # Valid formats
+    # '/etc/pki/ca-trust'
+    # '/etc/pki/ca-trust:/etc/pki/ca-trust'
+    # '/etc/pki/ca-trust:/etc/pki/ca-trust:O'
+
+    default_error_messages = {
+        'type_error': _('Expected list of strings but got {input_type} instead.'),
+        'path_error': _('{path} is not a valid path choice. You must provide an absolute path.'),
+        'mount_error': _('{scontext} is not a valid mount option. Allowed types are {mount_types}'),
+        'syntax_error': _('Invalid syntax. A string HOST-DIR[:CONTAINER-DIR[:OPTIONS]] is expected but got {path}.'),
+    }
+
+    def to_internal_value(self, paths):
+
+        if isinstance(paths, (list, tuple)):
+            for p in paths:
+                if not isinstance(p, str):
+                    self.fail('type_error', input_type=type(p))
+                if not p.startswith('/'):
+                    self.fail('path_error', path=p)
+
+                if p.count(':'):
+                    if p.count(':') > MAX_ISOLATED_PATH_COLON_DELIMITER:
+                        self.fail('syntax_error', path=p)
+                    try:
+                        src, dest, scontext = p.split(':')
+                    except ValueError:
+                        scontext = 'z'
+                        src, dest = p.split(':')
+                    finally:
+                        for sp in [src, dest]:
+                            if not len(sp):
+                                self.fail('syntax_error', path=sp)
+                            if not sp.startswith('/'):
+                                self.fail('path_error', path=sp)
+                        if scontext not in CONTAINER_VOLUMES_MOUNT_TYPES:
+                            self.fail('mount_error', scontext=scontext, mount_types=CONTAINER_VOLUMES_MOUNT_TYPES)
+            return super(StringListIsolatedPathField, self).to_internal_value(sorted(paths))
+        else:
+            self.fail('type_error', input_type=type(paths))
+
+
 class URLField(CharField):
     # these lines set up a custom regex that allow numbers in the
     # top-level domain
@@ -1,62 +0,0 @@
-import base64
-import hashlib
-
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives.ciphers import Cipher
-from cryptography.hazmat.primitives.ciphers.algorithms import AES
-from cryptography.hazmat.primitives.ciphers.modes import ECB
-
-
-__all__ = ['get_encryption_key', 'decrypt_field']
-
-
-def get_encryption_key(field_name, pk=None):
-    """
-    Generate key for encrypted password based on field name,
-    ``settings.SECRET_KEY``, and instance pk (if available).
-
-    :param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
-               can be omitted in situations where you're encrypting a setting
-               that is not database-persistent (like a read-only setting)
-    """
-    from django.conf import settings
-
-    h = hashlib.sha1()
-    h.update(settings.SECRET_KEY)
-    if pk is not None:
-        h.update(str(pk))
-    h.update(field_name)
-    return h.digest()[:16]
-
-
-def decrypt_value(encryption_key, value):
-    raw_data = value[len('$encrypted$') :]
-    # If the encrypted string contains a UTF8 marker, discard it
-    utf8 = raw_data.startswith('UTF8$')
-    if utf8:
-        raw_data = raw_data[len('UTF8$') :]
-    algo, b64data = raw_data.split('$', 1)
-    if algo != 'AES':
-        raise ValueError('unsupported algorithm: %s' % algo)
-    encrypted = base64.b64decode(b64data)
-    decryptor = Cipher(AES(encryption_key), ECB(), default_backend()).decryptor()
-    value = decryptor.update(encrypted) + decryptor.finalize()
-    value = value.rstrip('\x00')
-    # If the encrypted string contained a UTF8 marker, decode the data
-    if utf8:
-        value = value.decode('utf-8')
-    return value
-
-
-def decrypt_field(instance, field_name, subfield=None):
-    """
-    Return content of the given instance and field name decrypted.
-    """
-    value = getattr(instance, field_name)
-    if isinstance(value, dict) and subfield is not None:
-        value = value[subfield]
-    if not value or not value.startswith('$encrypted$'):
-        return value
-    key = get_encryption_key(field_name, getattr(instance, 'pk', None))
-
-    return decrypt_value(key, value)
@@ -25,7 +25,6 @@ from awx.main.utils import encrypt_field, decrypt_field
 from awx.conf import settings_registry
 from awx.conf.fields import PrimaryKeyRelatedField
 from awx.conf.models import Setting
-from awx.conf.migrations._reencrypt import decrypt_field as old_decrypt_field

 # FIXME: Gracefully handle when settings are accessed before the database is
 # ready (or during migrations).
@@ -299,13 +298,7 @@ class SettingsWrapper(UserSettingsHolder):
                 continue
             if self.registry.is_setting_encrypted(setting.key):
                 setting_ids[setting.key] = setting.id
-                try:
-                    value = decrypt_field(setting, 'value')
-                except ValueError as e:
-                    # TODO: Remove in Tower 3.3
-                    logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
-                    value = old_decrypt_field(setting, 'value')
+                value = decrypt_field(setting, 'value')
             else:
                 value = setting.value
             settings_to_cache[setting.key] = get_cache_value(value)
@@ -23,10 +23,10 @@ from rest_framework import status

 # AWX
 from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
-from awx.api.permissions import IsSuperUser
+from awx.api.permissions import IsSystemAdminOrAuditor
 from awx.api.versioning import reverse
 from awx.main.utils import camelcase_to_underscore
-from awx.main.tasks import handle_setting_changes
+from awx.main.tasks.system import handle_setting_changes
 from awx.conf.models import Setting
 from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
 from awx.conf import settings_registry
@@ -150,7 +150,7 @@ class SettingLoggingTest(GenericAPIView):
     name = _('Logging Connectivity Test')
     model = Setting
     serializer_class = SettingSingletonSerializer
-    permission_classes = (IsSuperUser,)
+    permission_classes = (IsSystemAdminOrAuditor,)
     filter_backends = []

     def post(self, request, *args, **kwargs):
7 file diffs suppressed because they are too large.
@@ -853,7 +853,12 @@ class InventoryAccess(BaseAccess):
     """

     model = Inventory
-    prefetch_related = ('created_by', 'modified_by', 'organization')
+    prefetch_related = (
+        'created_by',
+        'modified_by',
+        'organization',
+        Prefetch('labels', queryset=Label.objects.all().order_by('name')),
+    )

     def filtered_queryset(self, allowed=None, ad_hoc=None):
         return self.model.accessible_objects(self.user, 'read_role')
@@ -211,7 +211,7 @@ def projects_by_scm_type(since, **kwargs):
     return counts


-@register('instance_info', '1.1', description=_('Cluster topology and capacity'))
+@register('instance_info', '1.2', description=_('Cluster topology and capacity'))
 def instance_info(since, include_hostnames=False, **kwargs):
     info = {}
     instances = models.Instance.objects.values_list('hostname').values(
@@ -337,7 +337,11 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
             {tbl}.parent_uuid,
             {tbl}.event,
             task_action,
-            (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
+            -- '-' operator listed here:
+            -- https://www.postgresql.org/docs/12/functions-json.html
+            -- note that operator is only supported by jsonb objects
+            -- https://www.postgresql.org/docs/current/datatype-json.html
+            (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
             {tbl}.failed,
             {tbl}.changed,
             {tbl}.playbook,
@@ -352,14 +356,14 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
             x.duration AS duration,
             x.res->'warnings' AS warnings,
             x.res->'deprecations' AS deprecations
-        FROM {tbl}, json_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "start" text, "end" text)
+        FROM {tbl}, jsonb_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "start" text, "end" text)
         WHERE ({tbl}.{where_column} > '{since.isoformat()}' AND {tbl}.{where_column} <= '{until.isoformat()}')) TO STDOUT WITH CSV HEADER'''
         return query

     try:
-        return _copy_table(table='events', query=query(f"{tbl}.event_data::json"), path=full_path)
+        return _copy_table(table='events', query=query(f"{tbl}.event_data::jsonb"), path=full_path)
     except UntranslatableCharacter:
-        return _copy_table(table='events', query=query(f"replace({tbl}.event_data::text, '\\u0000', '')::json"), path=full_path)
+        return _copy_table(table='events', query=query(f"replace({tbl}.event_data::text, '\\u0000', '')::jsonb"), path=full_path)


 @register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
@@ -90,7 +90,7 @@ def package(target, data, timestamp):
             if isinstance(item, str):
                 f.add(item, arcname=f'./{name}')
             else:
-                buf = json.dumps(item).encode('utf-8')
+                buf = json.dumps(item, cls=DjangoJSONEncoder).encode('utf-8')
                 info = tarfile.TarInfo(f'./{name}')
                 info.size = len(buf)
                 info.mtime = timestamp.timestamp()
@@ -230,7 +230,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
         try:
             last_entry = max(last_entries.get(key) or last_gather, until - timedelta(weeks=4))
             results = (func(since or last_entry, collection_type=collection_type, until=until), func.__awx_analytics_version__)
-            json.dumps(results)  # throwaway check to see if the data is json-serializable
+            json.dumps(results, cls=DjangoJSONEncoder)  # throwaway check to see if the data is json-serializable
             data[filename] = results
         except Exception:
             logger.exception("Could not generate metric {}".format(filename))
@@ -160,6 +160,7 @@ class Metrics:
             IntM('callback_receiver_batch_events_errors', 'Number of times batch insertion failed'),
             FloatM('callback_receiver_events_insert_db_seconds', 'Time spent saving events to database'),
             IntM('callback_receiver_events_insert_db', 'Number of events batch inserted into database'),
+            IntM('callback_receiver_events_broadcast', 'Number of events broadcast to other control plane nodes'),
             HistogramM(
                 'callback_receiver_batch_events_insert_db', 'Number of events batch inserted into database', settings.SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS
             ),
@@ -72,8 +72,8 @@ register(
         'HTTP headers and meta keys to search to determine remote host '
         'name or IP. Add additional items to this list, such as '
         '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
-        'See the "Proxy Support" section of the Adminstrator guide for '
-        'more details.'
+        'See the "Proxy Support" section of the AAP Installation guide '
+        'for more details.'
     ),
     category=_('System'),
     category_slug='system',
@@ -259,10 +259,14 @@ register(

 register(
     'AWX_ISOLATION_SHOW_PATHS',
-    field_class=fields.StringListField,
+    field_class=fields.StringListIsolatedPathField,
     required=False,
     label=_('Paths to expose to isolated jobs'),
-    help_text=_('List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.'),
+    help_text=_(
+        'List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line. '
+        'Volumes will be mounted from the execution node to the container. '
+        'The supported format is HOST-DIR[:CONTAINER-DIR[:OPTIONS]]. '
+    ),
     category=_('Jobs'),
     category_slug='jobs',
 )
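With the stricter field class, setting values must follow the HOST-DIR[:CONTAINER-DIR[:OPTIONS]] shape validated by StringListIsolatedPathField; a sketch of plausible values (the paths themselves are examples):

    AWX_ISOLATION_SHOW_PATHS = [
        '/etc/pki/ca-trust',                      # HOST-DIR only
        '/opt/data:/opt/data',                    # HOST-DIR:CONTAINER-DIR
        '/etc/pki/ca-trust:/etc/pki/ca-trust:O',  # with a mount option ('z' or 'O')
    ]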
@@ -408,6 +412,21 @@ register(
     unit=_('seconds'),
 )

+register(
+    'DEFAULT_JOB_IDLE_TIMEOUT',
+    field_class=fields.IntegerField,
+    min_value=0,
+    default=0,
+    label=_('Default Job Idle Timeout'),
+    help_text=_(
+        'If no output is detected from ansible in this number of seconds the execution will be terminated. '
+        'Use value of 0 to used default idle_timeout is 600s.'
+    ),
+    category=_('Jobs'),
+    category_slug='jobs',
+    unit=_('seconds'),
+)
+
 register(
     'DEFAULT_INVENTORY_UPDATE_TIMEOUT',
     field_class=fields.IntegerField,
@@ -659,6 +678,24 @@ register(
     category=_('Logging'),
     category_slug='logging',
 )
+register(
+    'API_400_ERROR_LOG_FORMAT',
+    field_class=fields.CharField,
+    default='status {status_code} received by user {user_name} attempting to access {url_path} from {remote_addr}',
+    label=_('Log Format For API 4XX Errors'),
+    help_text=_(
+        'The format of logged messages when an API 4XX error occurs, '
+        'the following variables will be substituted: \n'
+        'status_code - The HTTP status code of the error\n'
+        'user_name - The user name attempting to use the API\n'
+        'url_path - The URL path to the API endpoint called\n'
+        'remote_addr - The remote address seen for the user\n'
+        'error - The error set by the api endpoint\n'
+        'Variables need to be in the format {<variable name>}.'
+    ),
+    category=_('Logging'),
+    category_slug='logging',
+)


 register(
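The default is an ordinary str.format() template, so the rendered message looks like this (field values made up for illustration):

    fmt = 'status {status_code} received by user {user_name} attempting to access {url_path} from {remote_addr}'
    print(fmt.format(status_code=403, user_name='alice', url_path='/api/v2/jobs/', remote_addr='10.0.0.5'))
    # -> status 403 received by user alice attempting to access /api/v2/jobs/ from 10.0.0.5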
@@ -672,7 +709,7 @@ register(
 register(
     'AUTOMATION_ANALYTICS_LAST_ENTRIES',
     field_class=fields.CharField,
-    label=_('Last gathered entries for expensive collectors for Insights for Ansible Automation Platform.'),
+    label=_('Last gathered entries from the data collection service of Insights for Ansible Automation Platform'),
     default='',
     allow_blank=True,
     category=_('System'),
@@ -77,3 +77,18 @@ LOGGER_BLOCKLIST = (
     # loggers that may be called getting logging settings
     'awx.conf',
 )
+
+# Reported version for node seen in receptor mesh but for which capacity check
+# failed or is in progress
+RECEPTOR_PENDING = 'ansible-runner-???'
+
+# Naming pattern for AWX jobs in /tmp folder, like /tmp/awx_42_xiwm
+# also update awxkit.api.pages.unified_jobs if changed
+JOB_FOLDER_PREFIX = 'awx_%s_'
+
+# :z option tells Podman that two containers share the volume content with r/w
+# :O option tells Podman to mount the directory from the host as a temporary storage using the overlay file system.
+# see podman-run manpage for further details
+# /HOST-DIR:/CONTAINER-DIR:OPTIONS
+CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O']
+MAX_ISOLATED_PATH_COLON_DELIMITER = 2
awx/main/credential_plugins/tss.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+from .plugin import CredentialPlugin
+from django.utils.translation import ugettext_lazy as _
+
+from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret
+
+tss_inputs = {
+    'fields': [
+        {
+            'id': 'server_url',
+            'label': _('Secret Server URL'),
+            'help_text': _('The Base URL of Secret Server e.g. https://myserver/SecretServer or https://mytenant.secretservercloud.com'),
+            'type': 'string',
+        },
+        {
+            'id': 'username',
+            'label': _('Username'),
+            'help_text': _('The (Application) user username'),
+            'type': 'string',
+        },
+        {
+            'id': 'password',
+            'label': _('Password'),
+            'help_text': _('The corresponding password'),
+            'type': 'string',
+            'secret': True,
+        },
+    ],
+    'metadata': [
+        {
+            'id': 'secret_id',
+            'label': _('Secret ID'),
+            'help_text': _('The integer ID of the secret'),
+            'type': 'string',
+        },
+        {
+            'id': 'secret_field',
+            'label': _('Secret Field'),
+            'help_text': _('The field to extract from the secret'),
+            'type': 'string',
+        },
+    ],
+    'required': ['server_url', 'username', 'password', 'secret_id', 'secret_field'],
+}
+
+
+def tss_backend(**kwargs):
+    authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
+    secret_server = SecretServer(kwargs['server_url'], authorizer)
+    secret_dict = secret_server.get_secret(kwargs['secret_id'])
+    secret = ServerSecret(**secret_dict)
+
+    return secret.fields[kwargs['secret_field']]
+
+
+tss_plugin = CredentialPlugin(
+    'Thycotic Secret Server',
+    tss_inputs,
+    tss_backend,
+)
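A minimal sketch of driving the plugin backend directly; the server URL and credentials are placeholders, and the thycotic SDK package is assumed to be installed:

    from awx.main.credential_plugins.tss import tss_backend

    value = tss_backend(
        server_url='https://myserver/SecretServer',  # placeholder
        username='app-user',                         # placeholder
        password='app-password',                     # placeholder
        secret_id='1',                               # integer ID of the secret
        secret_field='password',                     # field to extract
    )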
@@ -22,6 +22,7 @@ import psutil

 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
+from awx.main.utils.common import convert_mem_str_to_bytes

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -248,7 +249,7 @@ class WorkerPool(object):
         except Exception:
             logger.exception('could not fork')
         else:
-            logger.warn('scaling up worker pid:{}'.format(worker.pid))
+            logger.debug('scaling up worker pid:{}'.format(worker.pid))
         return idx, worker

     def debug(self, *args, **kwargs):
@@ -319,7 +320,8 @@ class AutoscalePool(WorkerPool):
         if self.max_workers is None:
             settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
             if settings_absmem is not None:
-                total_memory_gb = int(settings_absmem)
+                # There are 1073741824 bytes in a gigabyte. Convert bytes to gigabytes by dividing by 2**30
+                total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30
             else:
                 total_memory_gb = (psutil.virtual_memory().total >> 30) + 1  # noqa: round up
             # 5 workers per GB of total memory
@@ -387,7 +389,7 @@ class AutoscalePool(WorkerPool):
             # more processes in the pool than we need (> min)
             # send this process a message so it will exit gracefully
             # at the next opportunity
-            logger.warn('scaling down worker pid:{}'.format(w.pid))
+            logger.debug('scaling down worker pid:{}'.format(w.pid))
             w.quit()
             self.workers.remove(w)
             if w.alive:
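A quick sketch of what the SYSTEM_TASK_ABS_MEM change means: the setting is now converted through convert_mem_str_to_bytes rather than cast with int(); the '8Gi' input below is an assumed Kubernetes-style quantity string:

    from awx.main.utils.common import convert_mem_str_to_bytes

    # 2**30 bytes per gigabyte, matching the comment in the patch
    total_memory_gb = convert_mem_str_to_bytes('8Gi') // 2**30  # -> 8, assuming '8Gi' parses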
@@ -60,7 +60,7 @@ class AWXConsumerBase(object):
         return f'listening on {self.queues}'

     def control(self, body):
-        logger.warn(body)
+        logger.warn(f'Received control signal:\n{body}')
         control = body.get('control')
         if control in ('status', 'running'):
             reply_queue = body['reply_to']
@@ -137,7 +137,7 @@ class AWXConsumerPG(AWXConsumerBase):
     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)

-        logger.warn(f"Running worker {self.name} listening to queues {self.queues}")
+        logger.info(f"Running worker {self.name} listening to queues {self.queues}")
         init = False

         while True:
@@ -188,7 +188,7 @@ class BaseWorker(object):
                 if 'uuid' in body:
                     uuid = body['uuid']
                     finished.put(uuid)
-        logger.warn('worker exiting gracefully pid:{}'.format(os.getpid()))
+        logger.debug('worker exiting gracefully pid:{}'.format(os.getpid()))

     def perform_work(self, body):
         raise NotImplementedError()
@@ -17,7 +17,7 @@ import redis

 from awx.main.consumers import emit_channel_notification
 from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent, UnifiedJob, Job
-from awx.main.tasks import handle_success_and_failure_notifications
+from awx.main.tasks.system import handle_success_and_failure_notifications
 from awx.main.models.events import emit_event_detail
 from awx.main.utils.profiling import AWXProfiler
 import awx.main.analytics.subsystem_metrics as s_metrics
@@ -116,19 +116,20 @@ class CallbackBrokerWorker(BaseWorker):
     def flush(self, force=False):
         now = tz_now()
         if force or (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or any([len(events) >= 1000 for events in self.buff.values()]):
-            bulk_events_saved = 0
-            singular_events_saved = 0
+            metrics_bulk_events_saved = 0
+            metrics_singular_events_saved = 0
             metrics_events_batch_save_errors = 0
+            metrics_events_broadcast = 0
             for cls, events in self.buff.items():
                 logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
                 for e in events:
                     if not e.created:
                         e.created = now
                     e.modified = now
-                duration_to_save = time.perf_counter()
+                metrics_duration_to_save = time.perf_counter()
                 try:
                     cls.objects.bulk_create(events)
-                    bulk_events_saved += len(events)
+                    metrics_bulk_events_saved += len(events)
                 except Exception:
                     # if an exception occurs, we should re-attempt to save the
                     # events one-by-one, because something in the list is
@@ -137,22 +138,24 @@ class CallbackBrokerWorker(BaseWorker):
                     for e in events:
                         try:
                             e.save()
-                            singular_events_saved += 1
+                            metrics_singular_events_saved += 1
                         except Exception:
                             logger.exception('Database Error Saving Job Event')
-                duration_to_save = time.perf_counter() - duration_to_save
+                metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
                 for e in events:
                     if not getattr(e, '_skip_websocket_message', False):
+                        metrics_events_broadcast += 1
                         emit_event_detail(e)
             self.buff = {}
             self.last_flush = time.time()
             # only update metrics if we saved events
-            if (bulk_events_saved + singular_events_saved) > 0:
+            if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
                 self.subsystem_metrics.inc('callback_receiver_batch_events_errors', metrics_events_batch_save_errors)
-                self.subsystem_metrics.inc('callback_receiver_events_insert_db_seconds', duration_to_save)
-                self.subsystem_metrics.inc('callback_receiver_events_insert_db', bulk_events_saved + singular_events_saved)
-                self.subsystem_metrics.observe('callback_receiver_batch_events_insert_db', bulk_events_saved)
-                self.subsystem_metrics.inc('callback_receiver_events_in_memory', -(bulk_events_saved + singular_events_saved))
+                self.subsystem_metrics.inc('callback_receiver_events_insert_db_seconds', metrics_duration_to_save)
+                self.subsystem_metrics.inc('callback_receiver_events_insert_db', metrics_bulk_events_saved + metrics_singular_events_saved)
+                self.subsystem_metrics.observe('callback_receiver_batch_events_insert_db', metrics_bulk_events_saved)
+                self.subsystem_metrics.inc('callback_receiver_events_in_memory', -(metrics_bulk_events_saved + metrics_singular_events_saved))
+                self.subsystem_metrics.inc('callback_receiver_events_broadcast', metrics_events_broadcast)
             if self.subsystem_metrics.should_pipe_execute() is True:
                 self.subsystem_metrics.pipe_execute()
@@ -9,7 +9,7 @@ from kubernetes.config import kube_config
 from django.conf import settings
 from django_guid.middleware import GuidMiddleware

-from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
+from awx.main.tasks.system import dispatch_startup, inform_cluster_of_shutdown

 from .base import BaseWorker

@@ -30,8 +30,8 @@ class TaskWorker(BaseWorker):
     """
     Transform a dotted notation task into an imported, callable function, e.g.,

-    awx.main.tasks.delete_inventory
-    awx.main.tasks.RunProjectUpdate
+    awx.main.tasks.system.delete_inventory
+    awx.main.tasks.jobs.RunProjectUpdate
     """
     if not task.startswith('awx.'):
         raise ValueError('{} is not a valid awx task'.format(task))
@@ -73,15 +73,15 @@ class TaskWorker(BaseWorker):
         'callbacks': [{
             'args': [],
             'kwargs': {}
-            'task': u'awx.main.tasks.handle_work_success'
+            'task': u'awx.main.tasks.system.handle_work_success'
         }],
         'errbacks': [{
             'args': [],
             'kwargs': {},
-            'task': 'awx.main.tasks.handle_work_error'
+            'task': 'awx.main.tasks.system.handle_work_error'
         }],
         'kwargs': {},
-        'task': u'awx.main.tasks.RunProjectUpdate'
+        'task': u'awx.main.tasks.jobs.RunProjectUpdate'
     }
     """
     settings.__clean_on_fork__()
@@ -36,3 +36,7 @@ class PostRunError(Exception):
         self.status = status
         self.tb = tb
         super(PostRunError, self).__init__(msg)
+
+
+class ReceptorNodeNotFound(RuntimeError):
+    pass
@@ -10,6 +10,6 @@ def is_ha_environment():
     otherwise.
     """
     # If there are two or more instances, then we are in an HA environment.
-    if Instance.objects.count() > 1:
+    if Instance.objects.filter(node_type__in=('control', 'hybrid')).count() > 1:
         return True
     return False
@@ -23,44 +23,54 @@ class Command(BaseCommand):
|
|||||||
with impersonate(superuser):
|
with impersonate(superuser):
|
||||||
with disable_computed_fields():
|
with disable_computed_fields():
|
||||||
if not Organization.objects.exists():
|
if not Organization.objects.exists():
|
||||||
o = Organization.objects.create(name='Default')
|
o, _ = Organization.objects.get_or_create(name='Default')
|
||||||
|
|
||||||
p = Project(
|
# Avoid calling directly the get_or_create() to bypass project update
|
||||||
name='Demo Project',
|
p = Project.objects.filter(name='Demo Project', scm_type='git').first()
|
||||||
scm_type='git',
|
if not p:
|
||||||
scm_url='https://github.com/ansible/ansible-tower-samples',
|
p = Project(
|
||||||
scm_update_on_launch=True,
|
name='Demo Project',
|
||||||
scm_update_cache_timeout=0,
|
scm_type='git',
|
||||||
organization=o,
|
scm_url='https://github.com/ansible/ansible-tower-samples',
|
||||||
)
|
scm_update_on_launch=True,
|
||||||
|
scm_update_cache_timeout=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
p.organization = o
|
||||||
p.save(skip_update=True)
|
p.save(skip_update=True)
|
||||||
|
|
||||||
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
||||||
c = Credential.objects.create(
|
c, _ = Credential.objects.get_or_create(
|
||||||
credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
|
credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
|
||||||
)
|
)
|
||||||
|
|
||||||
c.admin_role.members.add(superuser)
|
c.admin_role.members.add(superuser)
|
||||||
|
|
||||||
public_galaxy_credential = Credential(
|
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
||||||
name='Ansible Galaxy',
|
name='Ansible Galaxy',
|
||||||
managed=True,
|
managed=True,
|
||||||
credential_type=CredentialType.objects.get(kind='galaxy'),
|
credential_type=CredentialType.objects.get(kind='galaxy'),
|
||||||
inputs={'url': 'https://galaxy.ansible.com/'},
|
inputs={'url': 'https://galaxy.ansible.com/'},
|
||||||
)
|
)
|
||||||
public_galaxy_credential.save()
|
|
||||||
o.galaxy_credentials.add(public_galaxy_credential)
|
o.galaxy_credentials.add(public_galaxy_credential)
|
||||||
|
|
||||||
i = Inventory.objects.create(name='Demo Inventory', organization=o, created_by=superuser)
|
i, _ = Inventory.objects.get_or_create(name='Demo Inventory', organization=o, created_by=superuser)
|
||||||
|
|
||||||
Host.objects.create(
|
Host.objects.get_or_create(
|
||||||
name='localhost',
|
name='localhost',
|
||||||
inventory=i,
|
inventory=i,
|
||||||
variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'",
|
variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'",
|
||||||
created_by=superuser,
|
created_by=superuser,
|
||||||
)
|
)
|
||||||
|
|
||||||
jt = JobTemplate.objects.create(name='Demo Job Template', playbook='hello_world.yml', project=p, inventory=i)
|
jt = JobTemplate.objects.filter(name='Demo Job Template').first()
|
||||||
|
if jt:
|
||||||
|
jt.project = p
|
||||||
|
jt.inventory = i
|
||||||
|
jt.playbook = 'hello_world.yml'
|
||||||
|
jt.save()
|
||||||
|
else:
|
||||||
|
jt, _ = JobTemplate.objects.get_or_create(name='Demo Job Template', playbook='hello_world.yml', project=p, inventory=i)
|
||||||
jt.credentials.add(c)
|
jt.credentials.add(c)
|
||||||
|
|
||||||
print('Default organization added.')
|
print('Default organization added.')
|
||||||
|
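Taken together, the replacements above make the seed command rerunnable: create() calls can fail on uniqueness constraints when the rows already exist, while get_or_create() simply reuses them. A minimal sketch of the pattern, using Organization as the stand-in model:

    # get_or_create returns (object, created); repeated runs find the
    # existing row instead of violating the unique-name constraint.
    o, created = Organization.objects.get_or_create(name='Default')
    if not created:
        pass  # row already existed; nothing to do on a re-run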
@@ -10,6 +10,7 @@ import subprocess
 import sys
 import time
 import traceback
+from collections import OrderedDict
 
 # Django
 from django.conf import settings

@@ -75,7 +76,24 @@ class AnsibleInventoryLoader(object):
             bargs.extend(['-v', '{0}:{0}:Z'.format(self.source)])
             for key, value in STANDARD_INVENTORY_UPDATE_ENV.items():
                 bargs.extend(['-e', '{0}={1}'.format(key, value)])
-            bargs.extend([get_default_execution_environment().image])
+            ee = get_default_execution_environment()
+
+            if settings.IS_K8S:
+                logger.warn('This command is not able to run on kubernetes-based deployment. This action should be done using the API.')
+                sys.exit(1)
+
+            if ee.credential:
+                process = subprocess.run(['podman', 'image', 'exists', ee.image], capture_output=True)
+                if process.returncode != 0:
+                    logger.warn(
+                        f'The default execution environment (id={ee.id}, name={ee.name}, image={ee.image}) is not available on this node. '
+                        'The image needs to be available locally before using this command, due to registry authentication. '
+                        'To pull this image, either run a job on this node or manually pull the image.'
+                    )
+                    sys.exit(1)
+
+            bargs.extend([ee.image])
+
         bargs.extend(['ansible-inventory', '-i', self.source])
         bargs.extend(['--playbook-dir', functioning_dir(self.source)])
         if self.verbosity:

@@ -110,9 +128,7 @@ class AnsibleInventoryLoader(object):
 
     def load(self):
         base_args = self.get_base_args()
-
         logger.info('Reading Ansible inventory source: %s', self.source)
-
         return self.command_to_json(base_args)
 
 
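The new pre-flight in get_base_args() bails out on Kubernetes deployments and, when the default execution environment sits behind registry credentials, checks that its image is already in local container storage before trying to use it. A hedged sketch of that probe, assuming podman is on PATH:

    import subprocess

    def image_available_locally(image):
        # 'podman image exists' exits 0 when the image is present in local storage.
        return subprocess.run(['podman', 'image', 'exists', image], capture_output=True).returncode == 0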
@@ -137,7 +153,7 @@ class Command(BaseCommand):
             type=str,
             default=None,
             metavar='v',
-            help='host variable used to ' 'set/clear enabled flag when host is online/offline, may ' 'be specified as "foo.bar" to traverse nested dicts.',
+            help='host variable used to set/clear enabled flag when host is online/offline, may be specified as "foo.bar" to traverse nested dicts.',
         )
         parser.add_argument(
             '--enabled-value',

@@ -145,7 +161,7 @@ class Command(BaseCommand):
             type=str,
             default=None,
             metavar='v',
-            help='value of host variable ' 'specified by --enabled-var that indicates host is ' 'enabled/online.',
+            help='value of host variable specified by --enabled-var that indicates host is enabled/online.',
         )
         parser.add_argument(
             '--group-filter',

@@ -153,7 +169,7 @@ class Command(BaseCommand):
             type=str,
             default=None,
             metavar='regex',
-            help='regular expression ' 'to filter group name(s); only matches are imported.',
+            help='regular expression to filter group name(s); only matches are imported.',
         )
         parser.add_argument(
             '--host-filter',

@@ -161,14 +177,14 @@ class Command(BaseCommand):
             type=str,
             default=None,
             metavar='regex',
-            help='regular expression ' 'to filter host name(s); only matches are imported.',
+            help='regular expression to filter host name(s); only matches are imported.',
         )
         parser.add_argument(
             '--exclude-empty-groups',
             dest='exclude_empty_groups',
             action='store_true',
             default=False,
-            help='when set, ' 'exclude all groups that have no child groups, hosts, or ' 'variables.',
+            help='when set, exclude all groups that have no child groups, hosts, or variables.',
         )
         parser.add_argument(
             '--instance-id-var',

@@ -176,7 +192,7 @@ class Command(BaseCommand):
             type=str,
             default=None,
             metavar='v',
-            help='host variable that ' 'specifies the unique, immutable instance ID, may be ' 'specified as "foo.bar" to traverse nested dicts.',
+            help='host variable that specifies the unique, immutable instance ID, may be specified as "foo.bar" to traverse nested dicts.',
         )
 
     def set_logging_level(self, verbosity):
@@ -269,12 +285,13 @@ class Command(BaseCommand):
         self.db_instance_id_map = {}
         if self.instance_id_var:
             host_qs = self.inventory_source.hosts.all()
-            host_qs = host_qs.filter(instance_id='', variables__contains=self.instance_id_var.split('.')[0])
-            for host in host_qs:
-                instance_id = self._get_instance_id(host.variables_dict)
-                if not instance_id:
-                    continue
-                self.db_instance_id_map[instance_id] = host.pk
+            for instance_id_part in reversed(self.instance_id_var.split(',')):
+                host_qs = host_qs.filter(instance_id='', variables__contains=instance_id_part.split('.')[0])
+                for host in host_qs:
+                    instance_id = self._get_instance_id(host.variables_dict)
+                    if not instance_id:
+                        continue
+                    self.db_instance_id_map[instance_id] = host.pk
 
     def _build_mem_instance_id_map(self):
         """

@@ -300,7 +317,7 @@ class Command(BaseCommand):
             self._cached_host_pk_set = frozenset(self.inventory_source.hosts.values_list('pk', flat=True))
         return self._cached_host_pk_set
 
-    def _delete_hosts(self):
+    def _delete_hosts(self, pk_mem_host_map):
         """
         For each host in the database that is NOT in the local list, delete
         it. When importing from a cloud inventory source attached to a

@@ -309,25 +326,10 @@ class Command(BaseCommand):
         """
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
 
         hosts_qs = self.inventory_source.hosts
-        # Build list of all host pks, remove all that should not be deleted.
-        del_host_pks = set(self._existing_host_pks())  # makes mutable copy
-        if self.instance_id_var:
-            all_instance_ids = list(self.mem_instance_id_map.keys())
-            instance_ids = []
-            for offset in range(0, len(all_instance_ids), self._batch_size):
-                instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
-                for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
-                    del_host_pks.discard(host_pk)
-                for host_pk in set([v for k, v in self.db_instance_id_map.items() if k in instance_ids]):
-                    del_host_pks.discard(host_pk)
-            all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
-        else:
-            all_host_names = list(self.all_group.all_hosts.keys())
-        for offset in range(0, len(all_host_names), self._batch_size):
-            host_names = all_host_names[offset : (offset + self._batch_size)]
-            for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
-                del_host_pks.discard(host_pk)
+        del_host_pks = hosts_qs.exclude(pk__in=pk_mem_host_map.keys()).values_list('pk', flat=True)
         # Now delete all remaining hosts in batches.
         all_del_pks = sorted(list(del_host_pks))
         for offset in range(0, len(all_del_pks), self._batch_size):
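With the pk-to-memory-host map computed once up front, deciding which hosts to delete collapses to a single exclude() queryset instead of the per-strategy discard loops removed above; only the batched delete that follows remains. A sketch of that remaining step, assuming the names in scope here (the deletion body itself is not shown in the hunk):

    # del_host_pks holds pks of DB hosts absent from the imported data.
    all_del_pks = sorted(list(del_host_pks))
    for offset in range(0, len(all_del_pks), batch_size):
        pks = all_del_pks[offset : (offset + batch_size)]
        hosts_qs.filter(pk__in=pks).delete()  # assumed per-batch delete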
@@ -568,7 +570,63 @@ class Command(BaseCommand):
                 logger.debug('Host "%s" is now disabled', mem_host.name)
         self._batch_add_m2m(self.inventory_source.hosts, db_host)
 
-    def _create_update_hosts(self):
+    def _build_pk_mem_host_map(self):
+        """
+        Creates and returns a data structure that maps DB hosts to in-memory host that
+        they correspond to - meaning that those hosts will be updated to in-memory host values
+        """
+        mem_host_pk_map = OrderedDict()  # keys are mem_host name, values are matching DB host pk
+        host_pks_updated = set()  # same as items of mem_host_pk_map but used for efficiency
+        mem_host_pk_map_by_id = {}  # incomplete mapping by new instance_id to be sorted and pushed to mem_host_pk_map
+        mem_host_instance_id_map = {}
+        for k, v in self.all_group.all_hosts.items():
+            instance_id = self._get_instance_id(v.variables)
+            if instance_id in self.db_instance_id_map:
+                mem_host_pk_map_by_id[self.db_instance_id_map[instance_id]] = v
+            elif instance_id:
+                mem_host_instance_id_map[instance_id] = v
+
+        # Update all existing hosts where we know the PK based on instance_id.
+        all_host_pks = sorted(mem_host_pk_map_by_id.keys())
+        for offset in range(0, len(all_host_pks), self._batch_size):
+            host_pks = all_host_pks[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk').filter(pk__in=host_pks):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = mem_host_pk_map_by_id[db_host.pk]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Update all existing hosts where we know the DB (the prior) instance_id.
+        all_instance_ids = sorted(mem_host_instance_id_map.keys())
+        for offset in range(0, len(all_instance_ids), self._batch_size):
+            instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk', 'instance_id').filter(instance_id__in=instance_ids):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = mem_host_instance_id_map[db_host.instance_id]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Update all existing hosts by name.
+        all_host_names = sorted(self.all_group.all_hosts.keys())
+        for offset in range(0, len(all_host_names), self._batch_size):
+            host_names = all_host_names[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk', 'name').filter(name__in=host_names):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = self.all_group.all_hosts[db_host.name]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Rotate the dictionary so that lookups are done by the host pk
+        pk_mem_host_map = OrderedDict()
+        for name, host_pk in mem_host_pk_map.items():
+            pk_mem_host_map[host_pk] = name
+
+        return pk_mem_host_map  # keys are DB host pk, keys are matching mem host name
+
+    def _create_update_hosts(self, pk_mem_host_map):
         """
         For each host in the local list, create it if it doesn't exist in the
         database. Otherwise, update/replace database variables from the

@@ -577,57 +635,22 @@ class Command(BaseCommand):
         """
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
-        host_pks_updated = set()
-        mem_host_pk_map = {}
-        mem_host_instance_id_map = {}
-        mem_host_name_map = {}
-        mem_host_names_to_update = set(self.all_group.all_hosts.keys())
-        for k, v in self.all_group.all_hosts.items():
-            mem_host_name_map[k] = v
-            instance_id = self._get_instance_id(v.variables)
-            if instance_id in self.db_instance_id_map:
-                mem_host_pk_map[self.db_instance_id_map[instance_id]] = v
-            elif instance_id:
-                mem_host_instance_id_map[instance_id] = v
 
-        # Update all existing hosts where we know the PK based on instance_id.
-        all_host_pks = sorted(mem_host_pk_map.keys())
+        updated_mem_host_names = set()
+        all_host_pks = sorted(pk_mem_host_map.keys())
         for offset in range(0, len(all_host_pks), self._batch_size):
             host_pks = all_host_pks[offset : (offset + self._batch_size)]
             for db_host in self.inventory.hosts.filter(pk__in=host_pks):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_pk_map[db_host.pk]
+                mem_host_name = pk_mem_host_map[db_host.pk]
+                mem_host = self.all_group.all_hosts[mem_host_name]
                 self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
+                updated_mem_host_names.add(mem_host.name)
 
-        # Update all existing hosts where we know the instance_id.
-        all_instance_ids = sorted(mem_host_instance_id_map.keys())
-        for offset in range(0, len(all_instance_ids), self._batch_size):
-            instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
-            for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_instance_id_map[db_host.instance_id]
-                self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
-
-        # Update all existing hosts by name.
-        all_host_names = sorted(mem_host_name_map.keys())
-        for offset in range(0, len(all_host_names), self._batch_size):
-            host_names = all_host_names[offset : (offset + self._batch_size)]
-            for db_host in self.inventory.hosts.filter(name__in=host_names):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_name_map[db_host.name]
-                self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
+        mem_host_names_to_create = set(self.all_group.all_hosts.keys()) - updated_mem_host_names
 
         # Create any new hosts.
-        for mem_host_name in sorted(mem_host_names_to_update):
+        for mem_host_name in sorted(mem_host_names_to_create):
             mem_host = self.all_group.all_hosts[mem_host_name]
             import_vars = mem_host.variables
             host_desc = import_vars.pop('_awx_description', 'imported')
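The new _build_pk_mem_host_map() centralizes host matching into one ordered precedence: DB pks already known from a prior import's instance_id, then DB rows whose stored instance_id matches an incoming one, finally a plain name match; the first claim on a pk wins. A toy illustration of that first-claim-wins rule (the three iterables are hypothetical stand-ins for the batched queries above):

    pk_mem_host_map = {}  # DB host pk -> matching in-memory host name
    for db_pk, mem_name in (matches_by_prior_pk + matches_by_instance_id + matches_by_name):
        pk_mem_host_map.setdefault(db_pk, mem_name)  # earlier passes take precedence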
@@ -726,13 +749,14 @@ class Command(BaseCommand):
         self._batch_size = 500
         self._build_db_instance_id_map()
         self._build_mem_instance_id_map()
+        pk_mem_host_map = self._build_pk_mem_host_map()
         if self.overwrite:
-            self._delete_hosts()
+            self._delete_hosts(pk_mem_host_map)
             self._delete_groups()
             self._delete_group_children_and_hosts()
         self._update_inventory()
         self._create_update_groups()
-        self._create_update_hosts()
+        self._create_update_hosts(pk_mem_host_map)
         self._create_update_group_children()
         self._create_update_group_hosts()

@@ -1008,4 +1032,4 @@ class Command(BaseCommand):
         if settings.SQL_DEBUG:
             queries_this_import = connection.queries[queries_before:]
             sqltime = sum(float(x['time']) for x in queries_this_import)
-            logger.warning('Inventory import required %d queries ' 'taking %0.3fs', len(queries_this_import), sqltime)
+            logger.warning('Inventory import required %d queries taking %0.3fs', len(queries_this_import), sqltime)
@@ -11,13 +11,16 @@ class Ungrouped(object):
     policy_instance_percentage = None
     policy_instance_minimum = None
 
+    def __init__(self):
+        self.qs = Instance.objects.filter(rampart_groups__isnull=True)
+
     @property
     def instances(self):
-        return Instance.objects.filter(rampart_groups__isnull=True)
+        return self.qs
 
     @property
     def capacity(self):
-        return sum(x.capacity for x in self.instances)
+        return sum(x.capacity for x in self.instances.all())
 
 
 class Command(BaseCommand):

@@ -25,27 +28,33 @@ class Command(BaseCommand):
 
     def handle(self, *args, **options):
         super(Command, self).__init__()
+        no_color = options.get("no_color", False)
+
         groups = list(InstanceGroup.objects.all())
         ungrouped = Ungrouped()
-        if len(ungrouped.instances):
+        if len(ungrouped.instances.all()):
            groups.append(ungrouped)
 
-        for instance_group in groups:
-            fmt = '[{0.name} capacity={0.capacity}'
-            if instance_group.policy_instance_percentage:
-                fmt += ' policy={0.policy_instance_percentage}%'
-            if instance_group.policy_instance_minimum:
-                fmt += ' policy>={0.policy_instance_minimum}'
-            print((fmt + ']').format(instance_group))
-            for x in instance_group.instances.all():
+        for ig in groups:
+            policy = ''
+            if ig.policy_instance_percentage:
+                policy = f' policy={ig.policy_instance_percentage}%'
+            if ig.policy_instance_minimum:
+                policy = f' policy>={ig.policy_instance_minimum}'
+            print(f'[{ig.name} capacity={ig.capacity}{policy}]')
+
+            for x in ig.instances.all():
                 color = '\033[92m'
-                if x.capacity == 0:
+                if x.capacity == 0 and x.node_type != 'hop':
                     color = '\033[91m'
-                if x.enabled is False:
+                if not x.enabled:
                     color = '\033[90m[DISABLED] '
-                fmt = '\t' + color + '{0.hostname} capacity={0.capacity} version={1}'
-                if x.capacity:
-                    fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
-                print((fmt + '\033[0m').format(x, x.version or '?'))
-            print('')
+                if no_color:
+                    color = ''
+
+                capacity = f' capacity={x.capacity}' if x.node_type != 'hop' else ''
+                version = f" version={x.version or '?'}" if x.node_type != 'hop' else ''
+                heartbeat = f' heartbeat="{x.modified:%Y-%m-%d %H:%M:%S}"' if x.capacity or x.node_type == 'hop' else ''
+                print(f'\t{color}{x.hostname}{capacity} node_type={x.node_type}{version}{heartbeat}\033[0m')
+
+            print()
@@ -1,6 +1,6 @@
 from django.core.management.base import BaseCommand
 
-from awx.main.tasks import profile_sql
+from awx.main.tasks.system import profile_sql
 
 
 class Command(BaseCommand):

@@ -1,7 +1,6 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved
 
-from django.conf import settings
 from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction
 

@@ -14,17 +13,19 @@ class Command(BaseCommand):
     Register this instance with the database for HA tracking.
     """
 
-    help = 'Add instance to the database. ' 'Specify `--hostname` to use this command.'
+    help = "Add instance to the database. Specify `--hostname` to use this command."
 
     def add_arguments(self, parser):
-        parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')
+        parser.add_argument('--hostname', dest='hostname', type=str, help="Hostname used during provisioning")
+        parser.add_argument('--node_type', type=str, default='hybrid', choices=['control', 'execution', 'hop', 'hybrid'], help="Instance Node type")
+        parser.add_argument('--uuid', type=str, help="Instance UUID")
 
-    def _register_hostname(self, hostname):
+    def _register_hostname(self, hostname, node_type, uuid):
         if not hostname:
             return
-        (changed, instance) = Instance.objects.register(uuid=self.uuid, hostname=hostname)
+        (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, uuid=uuid)
         if changed:
-            print('Successfully registered instance {}'.format(hostname))
+            print("Successfully registered instance {}".format(hostname))
         else:
             print("Instance already registered {}".format(instance.hostname))
         self.changed = changed

@@ -33,8 +34,7 @@ class Command(BaseCommand):
     def handle(self, **options):
         if not options.get('hostname'):
             raise CommandError("Specify `--hostname` to use this command.")
-        self.uuid = settings.SYSTEM_UUID
         self.changed = False
-        self._register_hostname(options.get('hostname'))
+        self._register_hostname(options.get('hostname'), options.get('node_type'), options.get('uuid'))
         if self.changed:
-            print('(changed: True)')
+            print("(changed: True)")
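With the new flags, provision_instance can register execution and hop nodes explicitly rather than assuming the hybrid default. A hedged invocation through Django's call_command, using a made-up hostname (the CLI equivalent would pass --hostname and --node_type):

    from django.core.management import call_command

    # node_type defaults to 'hybrid' when omitted; uuid is optional.
    call_command('provision_instance', hostname='execnode1.example.org', node_type='execution')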
awx/main/management/commands/register_peers.py (new file, 87 lines)
@@ -0,0 +1,87 @@
+import warnings
+
+from django.core.management.base import BaseCommand, CommandError
+from django.db import transaction
+
+from awx.main.models import Instance, InstanceLink
+
+
+class Command(BaseCommand):
+    """
+    Internal tower command.
+    Register the peers of a receptor node.
+    """
+
+    help = "Register or remove links between Receptor nodes."
+
+    def add_arguments(self, parser):
+        parser.add_argument('source', type=str, help="Receptor node opening the connections.")
+        parser.add_argument('--peers', type=str, nargs='+', required=False, help="Nodes that the source node connects out to.")
+        parser.add_argument('--disconnect', type=str, nargs='+', required=False, help="Nodes that should no longer be connected to by the source node.")
+        parser.add_argument(
+            '--exact',
+            type=str,
+            nargs='*',
+            required=False,
+            help="The exact set of nodes the source node should connect out to. Any existing links registered in the database that do not match will be removed. May be empty.",
+        )
+
+    def handle(self, **options):
+        nodes = Instance.objects.in_bulk(field_name='hostname')
+        if options['source'] not in nodes:
+            raise CommandError(f"Host {options['source']} is not a registered instance.")
+        if not (options['peers'] or options['disconnect'] or options['exact'] is not None):
+            raise CommandError("One of the options --peers, --disconnect, or --exact is required.")
+        if options['exact'] is not None and options['peers']:
+            raise CommandError("The option --peers may not be used with --exact.")
+        if options['exact'] is not None and options['disconnect']:
+            raise CommandError("The option --disconnect may not be used with --exact.")
+
+        # No 1-cycles
+        for collection in ('peers', 'disconnect', 'exact'):
+            if options[collection] is not None and options['source'] in options[collection]:
+                raise CommandError(f"Source node {options['source']} may not also be in --{collection}.")
+
+        # No 2-cycles
+        if options['peers'] or options['exact'] is not None:
+            peers = set(options['peers'] or options['exact'])
+            incoming = set(InstanceLink.objects.filter(target=nodes[options['source']]).values_list('source__hostname', flat=True))
+            if peers & incoming:
+                warnings.warn(f"Source node {options['source']} should not link to nodes already peering to it: {peers & incoming}.")
+
+        if options['peers']:
+            missing_peers = set(options['peers']) - set(nodes)
+            if missing_peers:
+                missing = ' '.join(missing_peers)
+                raise CommandError(f"Peers not currently registered as instances: {missing}")
+
+            results = 0
+            for target in options['peers']:
+                _, created = InstanceLink.objects.get_or_create(source=nodes[options['source']], target=nodes[target])
+                if created:
+                    results += 1
+
+            print(f"{results} new peer links added to the database.")
+
+        if options['disconnect']:
+            results = 0
+            for target in options['disconnect']:
+                if target not in nodes:  # Be permissive, the node might have already been de-registered.
+                    continue
+                n, _ = InstanceLink.objects.filter(source=nodes[options['source']], target=nodes[target]).delete()
+                results += n
+
+            print(f"{results} peer links removed from the database.")
+
+        if options['exact'] is not None:
+            additions = 0
+            with transaction.atomic():
+                peers = set(options['exact'])
+                links = set(InstanceLink.objects.filter(source=nodes[options['source']]).values_list('target__hostname', flat=True))
+                removals, _ = InstanceLink.objects.filter(source=nodes[options['source']], target__hostname__in=links - peers).delete()
+                for target in peers - links:
+                    _, created = InstanceLink.objects.get_or_create(source=nodes[options['source']], target=nodes[target])
+                    if created:
+                        additions += 1
+
+            print(f"{additions} peer links added and {removals} deleted from the database.")
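A hedged usage sketch for the new command, again via call_command and with hypothetical node names; 'source' is the positional argument, --peers and --disconnect take one or more nodes, and --exact may be an empty list to drop every outbound link:

    from django.core.management import call_command

    call_command('register_peers', 'hybridnode1', peers=['execnode1', 'hopnode1'])
    call_command('register_peers', 'hybridnode1', exact=[])  # remove all outbound links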
@@ -17,13 +17,14 @@ class InstanceNotFound(Exception):
 
 
 class RegisterQueue:
-    def __init__(self, queuename, instance_percent, inst_min, hostname_list, is_container_group=None):
+    def __init__(self, queuename, instance_percent, inst_min, hostname_list, is_container_group=None, pod_spec_override=None):
         self.instance_not_found_err = None
         self.queuename = queuename
         self.instance_percent = instance_percent
         self.instance_min = inst_min
         self.hostname_list = hostname_list
         self.is_container_group = is_container_group
+        self.pod_spec_override = pod_spec_override
 
     def get_create_update_instance_group(self):
         created = False

@@ -36,10 +37,14 @@ class RegisterQueue:
             ig.policy_instance_minimum = self.instance_min
             changed = True
 
-        if self.is_container_group:
+        if self.is_container_group and (ig.is_container_group != self.is_container_group):
             ig.is_container_group = self.is_container_group
             changed = True
 
+        if self.pod_spec_override and (ig.pod_spec_override != self.pod_spec_override):
+            ig.pod_spec_override = self.pod_spec_override
+            changed = True
+
         if changed:
             ig.save()
 

@@ -48,14 +53,14 @@ class RegisterQueue:
     def add_instances_to_group(self, ig):
         changed = False
 
-        instance_list_unique = set([x.strip() for x in self.hostname_list if x])
+        instance_list_unique = {x for x in (x.strip() for x in self.hostname_list) if x}
         instances = []
         for inst_name in instance_list_unique:
-            instance = Instance.objects.filter(hostname=inst_name)
+            instance = Instance.objects.filter(hostname=inst_name).exclude(node_type='hop')
             if instance.exists():
                 instances.append(instance[0])
             else:
-                raise InstanceNotFound("Instance does not exist: {}".format(inst_name), changed)
+                raise InstanceNotFound("Instance does not exist or cannot run jobs: {}".format(inst_name), changed)
 
         ig.instances.add(*instances)
 
@@ -97,27 +97,29 @@ class Command(BaseCommand):
 
         executor = MigrationExecutor(connection)
         migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
-        registered = False
 
-        if not migrating:
-            try:
-                Instance.objects.me()
-                registered = True
-            except RuntimeError:
-                pass
+        # In containerized deployments, migrations happen in the task container,
+        # and the services running there don't start until migrations are
+        # finished.
+        # *This* service runs in the web container, and it's possible that it can
+        # start _before_ migrations are finished, thus causing issues with the ORM
+        # queries it makes (specifically, conf.settings queries).
+        # This block is meant to serve as a sort of bail-out for the situation
+        # where migrations aren't yet finished (similar to the migration
+        # detection middleware that the uwsgi processes have) or when instance
+        # registration isn't done yet
+        if migrating:
+            logger.info('AWX is currently migrating, retry in 10s...')
+            time.sleep(10)
+            return
 
-        if migrating or not registered:
-            # In containerized deployments, migrations happen in the task container,
-            # and the services running there don't start until migrations are
-            # finished.
-            # *This* service runs in the web container, and it's possible that it can
-            # start _before_ migrations are finished, thus causing issues with the ORM
-            # queries it makes (specifically, conf.settings queries).
-            # This block is meant to serve as a sort of bail-out for the situation
-            # where migrations aren't yet finished (similar to the migration
-            # detection middleware that the uwsgi processes have) or when instance
-            # registration isn't done yet
-            logger.error('AWX is currently installing/upgrading. Trying again in 5s...')
+        try:
+            me = Instance.objects.me()
+            logger.info('Active instance with hostname {} is registered.'.format(me.hostname))
+        except RuntimeError as e:
+            # the CLUSTER_HOST_ID in the task, and web instance must match and
+            # ensure network connectivity between the task and web instance
+            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
             time.sleep(5)
             return
 
@@ -4,16 +4,18 @@
 import sys
 import logging
 import os
 
 from django.db import models
 from django.conf import settings
 
 from awx.main.utils.filters import SmartFilter
 from awx.main.utils.pglock import advisory_lock
+from awx.main.utils.common import get_capacity_type
+from awx.main.constants import RECEPTOR_PENDING
 
-___all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager', 'DeferJobCreatedManager']
+___all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager', 'DeferJobCreatedManager', 'UUID_DEFAULT']
 
 logger = logging.getLogger('awx.main.managers')
+UUID_DEFAULT = '00000000-0000-0000-0000-000000000000'
 
 
 class DeferJobCreatedManager(models.Manager):

@@ -104,18 +106,17 @@ class InstanceManager(models.Manager):
         """Return the currently active instance."""
         # If we are running unit tests, return a stub record.
         if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'):
-            return self.model(id=1, hostname='localhost', uuid='00000000-0000-0000-0000-000000000000')
+            return self.model(id=1, hostname=settings.CLUSTER_HOST_ID, uuid=UUID_DEFAULT)
 
         node = self.filter(hostname=settings.CLUSTER_HOST_ID)
         if node.exists():
             return node[0]
         raise RuntimeError("No instance found with the current cluster host id")
 
-    def register(self, uuid=None, hostname=None, ip_address=None):
-        if not uuid:
-            uuid = settings.SYSTEM_UUID
+    def register(self, uuid=None, hostname=None, ip_address=None, node_type='hybrid', defaults=None):
         if not hostname:
             hostname = settings.CLUSTER_HOST_ID
 
         with advisory_lock('instance_registration_%s' % hostname):
             if settings.AWX_AUTO_DEPROVISION_INSTANCES:
                 # detect any instances with the same IP address.

@@ -128,16 +129,44 @@ class InstanceManager(models.Manager):
                     other_inst.save(update_fields=['ip_address'])
                     logger.warning("IP address {0} conflict detected, ip address unset for host {1}.".format(ip_address, other_hostname))
 
-            instance = self.filter(hostname=hostname)
+            # Return existing instance that matches hostname or UUID (default to UUID)
+            if uuid is not None and uuid != UUID_DEFAULT and self.filter(uuid=uuid).exists():
+                instance = self.filter(uuid=uuid)
+            else:
+                # if instance was not retrieved by uuid and hostname was, use the hostname
+                instance = self.filter(hostname=hostname)
+
+            # Return existing instance
             if instance.exists():
-                instance = instance.get()
+                instance = instance.first()  # in the unusual occasion that there is more than one, only get one
+                update_fields = []
+                # if instance was retrieved by uuid and hostname has changed, update hostname
+                if instance.hostname != hostname:
+                    logger.warning("passed in hostname {0} is different from the original hostname {1}, updating to {0}".format(hostname, instance.hostname))
+                    instance.hostname = hostname
+                    update_fields.append('hostname')
+                # if any other fields are to be updated
                 if instance.ip_address != ip_address:
                     instance.ip_address = ip_address
-                    instance.save(update_fields=['ip_address'])
+                if instance.node_type != node_type:
+                    instance.node_type = node_type
+                    update_fields.append('node_type')
+                if update_fields:
+                    instance.save(update_fields=update_fields)
                     return (True, instance)
                 else:
                     return (False, instance)
-            instance = self.create(uuid=uuid, hostname=hostname, ip_address=ip_address, capacity=0)
+
+            # Create new instance, and fill in default values
+            create_defaults = dict(capacity=0)
+            if defaults is not None:
+                create_defaults.update(defaults)
+            uuid_option = {}
+            if uuid is not None:
+                uuid_option = dict(uuid=uuid)
+            if node_type == 'execution' and 'version' not in create_defaults:
+                create_defaults['version'] = RECEPTOR_PENDING
+            instance = self.create(hostname=hostname, ip_address=ip_address, node_type=node_type, **create_defaults, **uuid_option)
             return (True, instance)
 
     def get_or_register(self):
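After the register() hunk above, an existing row is resolved by UUID first with a fallback to hostname, drifting hostname and node_type values are updated in place, and a (changed, instance) tuple is returned either way. A hedged call sketch with made-up values:

    # uuid may be omitted; the lookup then falls back to the hostname.
    changed, instance = Instance.objects.register(
        hostname='execnode1.example.org',
        node_type='execution',
        uuid='00000000-0000-0000-0000-000000000001',
    )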
@@ -145,17 +174,18 @@ class InstanceManager(models.Manager):
             from awx.main.management.commands.register_queue import RegisterQueue
 
             pod_ip = os.environ.get('MY_POD_IP')
-            registered = self.register(ip_address=pod_ip)
+            if settings.IS_K8S:
+                registered = self.register(ip_address=pod_ip, node_type='control', uuid=settings.SYSTEM_UUID)
+            else:
+                registered = self.register(ip_address=pod_ip, uuid=settings.SYSTEM_UUID)
             RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
-            RegisterQueue(settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [], is_container_group=True).register()
+            RegisterQueue(
+                settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [], is_container_group=True, pod_spec_override=settings.DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE
+            ).register()
             return registered
         else:
             return (False, self.me())
 
-    def active_count(self):
-        """Return count of active Tower nodes for licensing."""
-        return self.all().count()
-
 
 class InstanceGroupManager(models.Manager):
     """A custom manager class for the Instance model.
@@ -189,6 +219,8 @@ class InstanceGroupManager(models.Manager):
         if name not in graph:
             graph[name] = {}
         graph[name]['consumed_capacity'] = 0
+        for capacity_type in ('execution', 'control'):
+            graph[name][f'consumed_{capacity_type}_capacity'] = 0
         if breakdown:
             graph[name]['committed_capacity'] = 0
             graph[name]['running_capacity'] = 0

@@ -211,7 +243,13 @@ class InstanceGroupManager(models.Manager):
         for t in tasks:
             # TODO: dock capacity for isolated job management tasks running in queue
             impact = t.task_impact
-            if t.status == 'waiting' or not t.execution_node:
+            control_groups = []
+            if t.controller_node:
+                control_groups = instance_ig_mapping.get(t.controller_node, [])
+                if not control_groups:
+                    logger.warn(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.")
+
+            if t.status == 'waiting' or (not t.execution_node and not t.is_container_group_task):
                 # Subtract capacity from any peer groups that share instances
                 if not t.instance_group:
                     impacted_groups = []

@@ -224,8 +262,16 @@ class InstanceGroupManager(models.Manager):
                     if group_name not in graph:
                         self.zero_out_group(graph, group_name, breakdown)
                     graph[group_name]['consumed_capacity'] += impact
+                    capacity_type = get_capacity_type(t)
+                    graph[group_name][f'consumed_{capacity_type}_capacity'] += impact
                     if breakdown:
                         graph[group_name]['committed_capacity'] += impact
+                for group_name in control_groups:
+                    if group_name not in graph:
+                        self.zero_out_group(graph, group_name, breakdown)
+                    graph[group_name][f'consumed_control_capacity'] += settings.AWX_CONTROL_NODE_TASK_IMPACT
+                    if breakdown:
+                        graph[group_name]['committed_capacity'] += settings.AWX_CONTROL_NODE_TASK_IMPACT
             elif t.status == 'running':
                 # Subtract capacity from all groups that contain the instance
                 if t.execution_node not in instance_ig_mapping:

@@ -237,12 +283,21 @@ class InstanceGroupManager(models.Manager):
                     impacted_groups = []
                 else:
                     impacted_groups = instance_ig_mapping[t.execution_node]
 
                 for group_name in impacted_groups:
                     if group_name not in graph:
                         self.zero_out_group(graph, group_name, breakdown)
                     graph[group_name]['consumed_capacity'] += impact
+                    capacity_type = get_capacity_type(t)
+                    graph[group_name][f'consumed_{capacity_type}_capacity'] += impact
                     if breakdown:
                         graph[group_name]['running_capacity'] += impact
+                for group_name in control_groups:
+                    if group_name not in graph:
+                        self.zero_out_group(graph, group_name, breakdown)
+                    graph[group_name][f'consumed_control_capacity'] += settings.AWX_CONTROL_NODE_TASK_IMPACT
+                    if breakdown:
+                        graph[group_name]['running_capacity'] += settings.AWX_CONTROL_NODE_TASK_IMPACT
             else:
                 logger.error('Programming error, %s not in ["running", "waiting"]', t.log_format)
         return graph
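After these hunks, each entry in the capacity graph carries per-type counters alongside the total, and control-plane overhead is charged to the controller node's groups via AWX_CONTROL_NODE_TASK_IMPACT. Illustrative shape of one entry (group name and numbers invented):

    graph['default'] = {
        'consumed_capacity': 43,
        'consumed_execution_capacity': 40,
        'consumed_control_capacity': 3,
        # plus 'committed_capacity' and 'running_capacity' when breakdown=True
    }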
@@ -180,11 +180,7 @@ class URLModificationMiddleware(MiddlewareMixin):
         return '/'.join(url_units)
 
     def process_request(self, request):
-        if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
-            old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
-            old_path = old_path[request.path.find(request.path_info) :]
-        else:
-            old_path = request.path_info
+        old_path = request.path_info
         new_path = self._convert_named_url(old_path)
         if request.path_info != new_path:
             request.environ['awx.named_url_rewritten'] = request.path

@@ -197,4 +193,4 @@ class MigrationRanCheckMiddleware(MiddlewareMixin):
         executor = MigrationExecutor(connection)
         plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
         if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
-            return redirect(reverse("ui_next:migrations_notran"))
+            return redirect(reverse("ui:migrations_notran"))
@@ -5,10 +5,6 @@ from __future__ import unicode_literals
 # Django
 from django.db import migrations
 
-# AWX
-from awx.main.migrations import _migration_utils as migration_utils
-from awx.main.migrations._reencrypt import blank_old_start_args
-
 
 class Migration(migrations.Migration):
 

@@ -17,6 +13,8 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.RunPython(migration_utils.set_current_apps_for_migrations, migrations.RunPython.noop),
-        migrations.RunPython(blank_old_start_args, migrations.RunPython.noop),
+        # This list is intentionally empty.
+        # Tower 3.3 included several data migrations that are no longer
+        # necessary (this list is now empty because Tower 3.3 is past EOL and
+        # cannot be directly upgraded to modern versions)
     ]

@@ -9,15 +9,6 @@ def remove_iso_instances(apps, schema_editor):
     Instance.objects.filter(rampart_groups__controller__isnull=False).delete()
 
 
-def remove_iso_groups(apps, schema_editor):
-    InstanceGroup = apps.get_model('main', 'InstanceGroup')
-    UnifiedJob = apps.get_model('main', 'UnifiedJob')
-    with transaction.atomic():
-        for ig in InstanceGroup.objects.filter(controller__isnull=False):
-            UnifiedJob.objects.filter(instance_group=ig).update(instance_group=None)
-            ig.delete()
-
-
 class Migration(migrations.Migration):
     atomic = False
 

@@ -27,7 +18,6 @@ class Migration(migrations.Migration):
 
     operations = [
         migrations.RunPython(remove_iso_instances),
-        migrations.RunPython(remove_iso_groups),
         migrations.RemoveField(
             model_name='instance',
             name='last_isolated_check',
22
awx/main/migrations/0152_instance_node_type.py
Normal file
22
awx/main/migrations/0152_instance_node_type.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
# Generated by Django 2.2.20 on 2021-07-26 19:42
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0151_rename_managed_by_tower'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='instance',
|
||||||
|
name='node_type',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[('control', 'Control plane node'), ('execution', 'Execution plane node'), ('hybrid', 'Controller and execution')],
|
||||||
|
default='hybrid',
|
||||||
|
max_length=16,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
27
awx/main/migrations/0153_instance_last_seen.py
Normal file
27
awx/main/migrations/0153_instance_last_seen.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# Generated by Django 2.2.20 on 2021-08-12 13:55
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0152_instance_node_type'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='instance',
|
||||||
|
name='last_seen',
|
||||||
|
field=models.DateTimeField(
|
||||||
|
editable=False,
|
||||||
|
help_text='Last time instance ran its heartbeat task for main cluster nodes. Last known connection to receptor mesh for execution nodes.',
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='instance',
|
||||||
|
name='memory',
|
||||||
|
field=models.BigIntegerField(default=0, editable=False, help_text='Total system memory of this instance in bytes.'),
|
||||||
|
),
|
||||||
|
]
|
||||||
18
awx/main/migrations/0154_set_default_uuid.py
Normal file
18
awx/main/migrations/0154_set_default_uuid.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 2.2.20 on 2021-09-01 22:53
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0153_instance_last_seen'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='instance',
|
||||||
|
name='uuid',
|
||||||
|
field=models.CharField(default='00000000-0000-0000-0000-000000000000', max_length=40),
|
||||||
|
),
|
||||||
|
]
|
||||||
25
awx/main/migrations/0155_improved_health_check.py
Normal file
25
awx/main/migrations/0155_improved_health_check.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 2.2.20 on 2021-08-31 17:41
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0154_set_default_uuid'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='instance',
|
||||||
|
name='errors',
|
||||||
|
field=models.TextField(blank=True, default='', editable=False, help_text='Any error details from the last health check.'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='instance',
|
||||||
|
name='last_health_check',
|
||||||
|
field=models.DateTimeField(
|
||||||
|
editable=False, help_text='Last time a health check was ran on this instance to refresh cpu, memory, and capacity.', null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
awx/main/migrations/0156_capture_mesh_topology.py (new file, +44)
@@ -0,0 +1,44 @@
+# Generated by Django 2.2.20 on 2021-12-17 19:26
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0155_improved_health_check'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='instance',
+            name='node_type',
+            field=models.CharField(
+                choices=[
+                    ('control', 'Control plane node'),
+                    ('execution', 'Execution plane node'),
+                    ('hybrid', 'Controller and execution'),
+                    ('hop', 'Message-passing node, no execution capability'),
+                ],
+                default='hybrid',
+                max_length=16,
+            ),
+        ),
+        migrations.CreateModel(
+            name='InstanceLink',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='main.Instance')),
+                ('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reverse_peers', to='main.Instance')),
+            ],
+            options={
+                'unique_together': {('source', 'target')},
+            },
+        ),
+        migrations.AddField(
+            model_name='instance',
+            name='peers',
+            field=models.ManyToManyField(through='main.InstanceLink', to='main.Instance'),
+        ),
+    ]
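A minimal sketch of the topology data this migration captures, using the Instance and InstanceLink models it creates; the hostnames and pre-existing rows are invented:

    from awx.main.models import Instance, InstanceLink

    # Invented example rows; real instances are created during node registration.
    hop = Instance.objects.get(hostname='hop-1.example.org')
    execution = Instance.objects.get(hostname='exec-1.example.org')

    # One directed edge per (source, target) pair; unique_together forbids duplicates.
    InstanceLink.objects.create(source=hop, target=execution)

    assert execution in hop.peers.all()                         # forward traversal via the through model
    assert execution.reverse_peers.filter(source=hop).exists()  # links arriving at the target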
awx/main/migrations/0157_inventory_labels.py (new file, +18)
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.20 on 2022-01-18 16:46
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0156_capture_mesh_topology'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='inventory',
+            name='labels',
+            field=models.ManyToManyField(blank=True, help_text='Labels associated with this inventory.', related_name='inventory_labels', to='main.Label'),
+        ),
+    ]
awx/main/migrations/0158_make_instance_cpu_decimal.py (new file, +19)
@@ -0,0 +1,19 @@
+# Generated by Django 2.2.24 on 2022-02-14 17:37
+
+from decimal import Decimal
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0157_inventory_labels'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='instance',
+            name='cpu',
+            field=models.DecimalField(decimal_places=1, default=Decimal('0'), editable=False, max_digits=4),
+        ),
+    ]
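For context on 0158: containerized nodes commonly get fractional CPU allocations, which an integer column can only round; the DecimalField keeps one decimal place. A worked illustration (the 1500m limit is an invented example):

    from decimal import Decimal

    millicores = 1500                          # an invented pod CPU limit of '1500m'
    cpu = Decimal(millicores) / Decimal(1000)  # Decimal('1.5')
    assert cpu == Decimal('1.5')               # fits max_digits=4, decimal_places=1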
@@ -1,26 +0,0 @@
-import logging
-
-from awx.conf.migrations._reencrypt import (
-    decrypt_field,
-)
-
-logger = logging.getLogger('awx.main.migrations')
-
-__all__ = []
-
-
-def blank_old_start_args(apps, schema_editor):
-    UnifiedJob = apps.get_model('main', 'UnifiedJob')
-    for uj in UnifiedJob.objects.defer('result_stdout_text').exclude(start_args='').iterator():
-        if uj.status in ['running', 'pending', 'new', 'waiting']:
-            continue
-        try:
-            args_dict = decrypt_field(uj, 'start_args')
-        except ValueError:
-            args_dict = None
-        if args_dict == {}:
-            continue
-        if uj.start_args:
-            logger.debug('Blanking job args for %s', uj.pk)
-            uj.start_args = ''
-            uj.save()
@@ -47,6 +47,7 @@ from awx.main.models.execution_environments import ExecutionEnvironment  # noqa
 from awx.main.models.activity_stream import ActivityStream  # noqa
 from awx.main.models.ha import (  # noqa
     Instance,
+    InstanceLink,
     InstanceGroup,
     TowerScheduleState,
 )
@@ -201,6 +202,8 @@ activity_stream_registrar.connect(Organization)
 activity_stream_registrar.connect(Inventory)
 activity_stream_registrar.connect(Host)
 activity_stream_registrar.connect(Group)
+activity_stream_registrar.connect(Instance)
+activity_stream_registrar.connect(InstanceGroup)
 activity_stream_registrar.connect(InventorySource)
 # activity_stream_registrar.connect(InventoryUpdate)
 activity_stream_registrar.connect(Credential)
@@ -144,7 +144,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
 
     @classmethod
     def _get_task_class(cls):
-        from awx.main.tasks import RunAdHocCommand
+        from awx.main.tasks.jobs import RunAdHocCommand
 
         return RunAdHocCommand
 
@@ -152,10 +152,6 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
     def is_container_group_task(self):
         return bool(self.instance_group and self.instance_group.is_container_group)
 
-    @property
-    def can_run_containerized(self):
-        return True
-
     def get_absolute_url(self, request=None):
         return reverse('api:ad_hoc_command_detail', kwargs={'pk': self.pk}, request=request)
 
@@ -164,9 +160,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
 
     @property
     def notification_templates(self):
-        all_orgs = set()
-        for h in self.hosts.all():
-            all_orgs.add(h.inventory.organization)
+        all_orgs = {h.inventory.organization for h in self.hosts.all()}
         active_templates = dict(error=set(), success=set(), started=set())
         base_notification_templates = NotificationTemplate.objects
         for org in all_orgs:
@@ -299,10 +299,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
 
     def has_inputs(self, field_names=()):
         for name in field_names:
-            if name in self.inputs:
-                if self.inputs[name] in ('', None):
-                    return False
-            else:
+            if not self.has_input(name):
                 raise ValueError('{} is not an input field'.format(name))
         return True
 
@@ -1118,6 +1115,7 @@ ManagedCredentialType(
             'label': ugettext_noop('Authentication URL'),
             'type': 'string',
             'help_text': ugettext_noop('Authentication endpoint for the container registry.'),
+            'default': 'quay.io',
         },
         {
             'id': 'username',
@@ -388,7 +388,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             job.get_event_queryset().filter(uuid__in=failed).update(failed=True)
 
         # send success/failure notifications when we've finished handling the playbook_on_stats event
-        from awx.main.tasks import handle_success_and_failure_notifications  # circular import
+        from awx.main.tasks.system import handle_success_and_failure_notifications  # circular import
 
         def _send_notifications():
             handle_success_and_failure_notifications.apply_async([job.id])
@@ -541,8 +541,7 @@ class JobEvent(BasePlaybookEvent):
             return
         job = self.job
 
-        from awx.main.models import Host, JobHostSummary  # circular import
-        from awx.main.models import Host, JobHostSummary, HostMetric
-
+        from awx.main.models import Host, JobHostSummary, HostMetric  # circular import
         all_hosts = Host.objects.filter(pk__in=self.host_map.values()).only('id', 'name')
         existing_host_ids = set(h.id for h in all_hosts)
@@ -2,6 +2,8 @@
 # All Rights Reserved.
 
 from decimal import Decimal
+import random
+import logging
 
 from django.core.validators import MinValueValidator
 from django.db import models, connection
@@ -16,14 +18,20 @@ from solo.models import SingletonModel
 
 from awx import __version__ as awx_application_version
 from awx.api.versioning import reverse
-from awx.main.managers import InstanceManager, InstanceGroupManager
+from awx.main.managers import InstanceManager, InstanceGroupManager, UUID_DEFAULT
 from awx.main.fields import JSONField
+from awx.main.constants import JOB_FOLDER_PREFIX
 from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
 from awx.main.models.unified_jobs import UnifiedJob
-from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity
+from awx.main.utils.common import get_corrected_cpu, get_cpu_effective_capacity, get_corrected_memory, get_mem_effective_capacity
 from awx.main.models.mixins import RelatedJobsMixin
 
-__all__ = ('Instance', 'InstanceGroup', 'TowerScheduleState')
+# ansible-runner
+from ansible_runner.utils.capacity import get_cpu_count, get_mem_in_bytes
+
+__all__ = ('Instance', 'InstanceGroup', 'InstanceLink', 'TowerScheduleState')
+
+logger = logging.getLogger('awx.main.models.ha')
 
 
 class HasPolicyEditsMixin(HasEditsMixin):
@@ -46,12 +54,21 @@ class HasPolicyEditsMixin(HasEditsMixin):
         return self._values_have_edits(new_values)
 
 
+class InstanceLink(BaseModel):
+    source = models.ForeignKey('Instance', on_delete=models.CASCADE, related_name='+')
+    target = models.ForeignKey('Instance', on_delete=models.CASCADE, related_name='reverse_peers')
+
+    class Meta:
+        unique_together = ('source', 'target')
+
+
 class Instance(HasPolicyEditsMixin, BaseModel):
     """A model representing an AWX instance running against this database."""
 
     objects = InstanceManager()
 
-    uuid = models.CharField(max_length=40)
+    # Fields set in instance registration
+    uuid = models.CharField(max_length=40, default=UUID_DEFAULT)
     hostname = models.CharField(max_length=250, unique=True)
     ip_address = models.CharField(
         blank=True,
@@ -60,9 +77,39 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         max_length=50,
         unique=True,
     )
+    # Auto-fields, implementation is different from BaseModel
     created = models.DateTimeField(auto_now_add=True)
     modified = models.DateTimeField(auto_now=True)
+    # Fields defined in health check or heartbeat
     version = models.CharField(max_length=120, blank=True)
+    cpu = models.DecimalField(
+        default=Decimal(0.0),
+        max_digits=4,
+        decimal_places=1,
+        editable=False,
+    )
+    memory = models.BigIntegerField(
+        default=0,
+        editable=False,
+        help_text=_('Total system memory of this instance in bytes.'),
+    )
+    errors = models.TextField(
+        default='',
+        blank=True,
+        editable=False,
+        help_text=_('Any error details from the last health check.'),
+    )
+    last_seen = models.DateTimeField(
+        null=True,
+        editable=False,
+        help_text=_('Last time instance ran its heartbeat task for main cluster nodes. Last known connection to receptor mesh for execution nodes.'),
+    )
+    last_health_check = models.DateTimeField(
+        null=True,
+        editable=False,
+        help_text=_('Last time a health check was run on this instance to refresh cpu, memory, and capacity.'),
+    )
+    # Capacity management
     capacity = models.PositiveIntegerField(
         default=100,
         editable=False,
@@ -70,14 +117,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
     capacity_adjustment = models.DecimalField(default=Decimal(1.0), max_digits=3, decimal_places=2, validators=[MinValueValidator(0)])
     enabled = models.BooleanField(default=True)
     managed_by_policy = models.BooleanField(default=True)
-    cpu = models.IntegerField(
-        default=0,
-        editable=False,
-    )
-    memory = models.BigIntegerField(
-        default=0,
-        editable=False,
-    )
     cpu_capacity = models.IntegerField(
         default=0,
         editable=False,
@@ -86,6 +126,15 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         default=0,
         editable=False,
     )
+    NODE_TYPE_CHOICES = [
+        ("control", "Control plane node"),
+        ("execution", "Execution plane node"),
+        ("hybrid", "Controller and execution"),
+        ("hop", "Message-passing node, no execution capability"),
+    ]
+    node_type = models.CharField(default='hybrid', choices=NODE_TYPE_CHOICES, max_length=16)
+
+    peers = models.ManyToManyField('self', symmetrical=False, through=InstanceLink, through_fields=('source', 'target'))
 
     class Meta:
         app_label = 'main'
@@ -98,17 +147,19 @@ class Instance(HasPolicyEditsMixin, BaseModel):
 
     @property
     def consumed_capacity(self):
-        return sum(x.task_impact for x in UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting')))
+        capacity_consumed = 0
+        if self.node_type in ('hybrid', 'execution'):
+            capacity_consumed += sum(x.task_impact for x in UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting')))
+        if self.node_type in ('hybrid', 'control'):
+            capacity_consumed += sum(
+                settings.AWX_CONTROL_NODE_TASK_IMPACT for x in UnifiedJob.objects.filter(controller_node=self.hostname, status__in=('running', 'waiting'))
+            )
+        return capacity_consumed
 
     @property
     def remaining_capacity(self):
         return self.capacity - self.consumed_capacity
 
-    @property
-    def role(self):
-        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
-        return "awx"
-
     @property
     def jobs_running(self):
         return UnifiedJob.objects.filter(
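A worked example of the split accounting in the new consumed_capacity, with invented numbers: a hybrid node that executes two jobs (task impacts 3 and 5) and also controls both, assuming settings.AWX_CONTROL_NODE_TASK_IMPACT is 1:

    # Invented numbers; AWX_CONTROL_NODE_TASK_IMPACT is assumed to be 1.
    execution_side = 3 + 5   # task_impact of jobs whose execution_node is this host
    control_side = 1 + 1     # flat per-job cost for jobs whose controller_node is this host
    assert execution_side + control_side == 10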
@@ -123,33 +174,121 @@ class Instance(HasPolicyEditsMixin, BaseModel):
     def jobs_total(self):
         return UnifiedJob.objects.filter(execution_node=self.hostname).count()
 
+    @staticmethod
+    def choose_online_control_plane_node():
+        return random.choice(
+            Instance.objects.filter(enabled=True, capacity__gt=0).filter(node_type__in=['control', 'hybrid']).values_list('hostname', flat=True)
+        )
+
+    def get_cleanup_task_kwargs(self, **kwargs):
+        """
+        Produce options to use for the command: ansible-runner worker cleanup
+        returns a dict that is passed to the python interface for the runner method corresponding to that command
+        any kwargs will override that key=value combination in the returned dict
+        """
+        vargs = dict()
+        if settings.AWX_CLEANUP_PATHS:
+            vargs['file_pattern'] = '/tmp/{}*'.format(JOB_FOLDER_PREFIX % '*')
+        vargs.update(kwargs)
+        if 'exclude_strings' not in vargs and vargs.get('file_pattern'):
+            active_pks = list(UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting')).values_list('pk', flat=True))
+            if active_pks:
+                vargs['exclude_strings'] = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
+        if 'remove_images' in vargs or 'image_prune' in vargs:
+            vargs.setdefault('process_isolation_executable', 'podman')
+        return vargs
+
     def is_lost(self, ref_time=None):
+        if self.last_seen is None:
+            return True
         if ref_time is None:
             ref_time = now()
-        grace_period = 120
-        return self.modified < ref_time - timedelta(seconds=grace_period)
+        grace_period = settings.CLUSTER_NODE_HEARTBEAT_PERIOD * 2
+        if self.node_type in ('execution', 'hop'):
+            grace_period += settings.RECEPTOR_SERVICE_ADVERTISEMENT_PERIOD
+        return self.last_seen < ref_time - timedelta(seconds=grace_period)
 
-    def refresh_capacity(self):
-        cpu = get_cpu_capacity()
-        mem = get_mem_capacity()
-        if self.enabled:
-            self.capacity = get_system_task_capacity(self.capacity_adjustment)
+    def mark_offline(self, update_last_seen=False, perform_save=True, errors=''):
+        if self.cpu_capacity == 0 and self.mem_capacity == 0 and self.capacity == 0 and self.errors == errors and (not update_last_seen):
+            return
+        self.cpu_capacity = self.mem_capacity = self.capacity = 0
+        self.errors = errors
+        if update_last_seen:
+            self.last_seen = now()
+
+        if perform_save:
+            update_fields = ['capacity', 'cpu_capacity', 'mem_capacity', 'errors']
+            if update_last_seen:
+                update_fields += ['last_seen']
+            self.save(update_fields=update_fields)
+
+    def set_capacity_value(self):
+        """Sets capacity according to capacity adjustment rule (no save)"""
+        if self.enabled and self.node_type != 'hop':
+            lower_cap = min(self.mem_capacity, self.cpu_capacity)
+            higher_cap = max(self.mem_capacity, self.cpu_capacity)
+            self.capacity = lower_cap + (higher_cap - lower_cap) * self.capacity_adjustment
         else:
             self.capacity = 0
+
+    def refresh_capacity_fields(self):
+        """Update derived capacity fields from cpu and memory (no save)"""
+        self.cpu_capacity = get_cpu_effective_capacity(self.cpu)
+        self.mem_capacity = get_mem_effective_capacity(self.memory)
+        self.set_capacity_value()
+
+    def save_health_data(self, version, cpu, memory, uuid=None, update_last_seen=False, errors=''):
+        self.last_health_check = now()
+        update_fields = ['last_health_check']
+
+        if update_last_seen:
+            self.last_seen = self.last_health_check
+            update_fields.append('last_seen')
+
+        if uuid is not None and self.uuid != uuid:
+            if self.uuid is not None:
+                logger.warn(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}')
+            self.uuid = uuid
+            update_fields.append('uuid')
+
+        if self.version != version:
+            self.version = version
+            update_fields.append('version')
+
+        new_cpu = get_corrected_cpu(cpu)
+        if new_cpu != self.cpu:
+            self.cpu = new_cpu
+            update_fields.append('cpu')
+
+        new_memory = get_corrected_memory(memory)
+        if new_memory != self.memory:
+            self.memory = new_memory
+            update_fields.append('memory')
+
+        if not errors:
+            self.refresh_capacity_fields()
+            self.errors = ''
+        else:
+            self.mark_offline(perform_save=False, errors=errors)
+        update_fields.extend(['cpu_capacity', 'mem_capacity', 'capacity', 'errors'])
+
+        # disabling activity stream will avoid extra queries, which is important for heartbeat actions
+        from awx.main.signals import disable_activity_stream
+
+        with disable_activity_stream():
+            self.save(update_fields=update_fields)
+
+    def local_health_check(self):
+        """Only call this method on the instance that this record represents"""
+        errors = None
         try:
             # if redis is down for some reason, that means we can't persist
             # playbook event data; we should consider this a zero capacity event
             redis.Redis.from_url(settings.BROKER_URL).ping()
         except redis.ConnectionError:
-            self.capacity = 0
+            errors = _('Failed to connect to Redis')
 
-        self.cpu = cpu[0]
-        self.memory = mem[0]
-        self.cpu_capacity = cpu[1]
-        self.mem_capacity = mem[1]
-        self.version = awx_application_version
-        self.save(update_fields=['capacity', 'version', 'modified', 'cpu', 'memory', 'cpu_capacity', 'mem_capacity'])
+        self.save_health_data(awx_application_version, get_cpu_count(), get_mem_in_bytes(), update_last_seen=True, errors=errors)
 
 
 class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
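The capacity_adjustment knob in set_capacity_value() linearly interpolates between the smaller and larger of the two derived capacities; 0 pins capacity to the conservative bound and 1 to the generous one. A worked example with invented figures:

    # Invented figures: cpu_capacity=36, mem_capacity=62, capacity_adjustment=0.5.
    lower_cap, higher_cap = min(36, 62), max(36, 62)
    capacity = lower_cap + (higher_cap - lower_cap) * 0.5
    assert capacity == 49.0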
@@ -194,7 +333,7 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
 
     @property
     def capacity(self):
-        return sum([inst.capacity for inst in self.instances.all()])
+        return sum(inst.capacity for inst in self.instances.all())
 
     @property
     def jobs_running(self):
@@ -215,19 +354,29 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
         app_label = 'main'
 
     @staticmethod
-    def fit_task_to_most_remaining_capacity_instance(task, instances):
+    def fit_task_to_most_remaining_capacity_instance(task, instances, impact=None, capacity_type=None, add_hybrid_control_cost=False):
+        impact = impact if impact else task.task_impact
+        capacity_type = capacity_type if capacity_type else task.capacity_type
         instance_most_capacity = None
+        most_remaining_capacity = -1
         for i in instances:
-            if i.remaining_capacity >= task.task_impact and (
-                instance_most_capacity is None or i.remaining_capacity > instance_most_capacity.remaining_capacity
-            ):
+            if i.node_type not in (capacity_type, 'hybrid'):
+                continue
+            would_be_remaining = i.remaining_capacity - impact
+            # hybrid nodes _always_ control their own tasks
+            if add_hybrid_control_cost and i.node_type == 'hybrid':
+                would_be_remaining -= settings.AWX_CONTROL_NODE_TASK_IMPACT
+            if would_be_remaining >= 0 and (instance_most_capacity is None or would_be_remaining > most_remaining_capacity):
                 instance_most_capacity = i
+                most_remaining_capacity = would_be_remaining
         return instance_most_capacity
 
     @staticmethod
-    def find_largest_idle_instance(instances):
+    def find_largest_idle_instance(instances, capacity_type='execution'):
         largest_instance = None
         for i in instances:
+            if i.node_type not in (capacity_type, 'hybrid'):
+                continue
             if i.jobs_running == 0:
                 if largest_instance is None:
                     largest_instance = i
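A short worked example of the would_be_remaining bookkeeping above, with invented numbers: a hybrid instance with 20 units remaining, a task of impact 8, and add_hybrid_control_cost enabled with settings.AWX_CONTROL_NODE_TASK_IMPACT assumed to be 1:

    remaining_capacity, impact = 20, 8
    control_cost = 1                      # assumed AWX_CONTROL_NODE_TASK_IMPACT
    would_be_remaining = remaining_capacity - impact - control_cost  # hybrid controls its own task
    assert would_be_remaining == 11       # still >= 0, so the instance is eligible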
@@ -246,7 +395,7 @@ class TowerScheduleState(SingletonModel):
 
 
 def schedule_policy_task():
-    from awx.main.tasks import apply_cluster_membership_policies
+    from awx.main.tasks.system import apply_cluster_membership_policies
 
     connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
 
@@ -170,6 +170,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
         editable=False,
         help_text=_('Flag indicating the inventory is being deleted.'),
     )
+    labels = models.ManyToManyField(
+        "Label",
+        blank=True,
+        related_name='inventory_labels',
+        help_text=_('Labels associated with this inventory.'),
+    )
 
     def get_absolute_url(self, request=None):
         return reverse('api:inventory_detail', kwargs={'pk': self.pk}, request=request)
@@ -366,7 +372,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
 
     @transaction.atomic
     def schedule_deletion(self, user_id=None):
-        from awx.main.tasks import delete_inventory
+        from awx.main.tasks.system import delete_inventory
         from awx.main.signals import activity_stream_delete
 
         if self.pending_deletion is True:
@@ -382,7 +388,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
         if self.kind == 'smart' and settings.AWX_REBUILD_SMART_MEMBERSHIP:
 
             def on_commit():
-                from awx.main.tasks import update_host_smart_inventory_memberships
+                from awx.main.tasks.system import update_host_smart_inventory_memberships
 
                 update_host_smart_inventory_memberships.delay()
 
@@ -551,7 +557,7 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
         if settings.AWX_REBUILD_SMART_MEMBERSHIP:
 
             def on_commit():
-                from awx.main.tasks import update_host_smart_inventory_memberships
+                from awx.main.tasks.system import update_host_smart_inventory_memberships
 
                 update_host_smart_inventory_memberships.delay()
 
@@ -631,7 +637,7 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
     @transaction.atomic
    def delete_recursive(self):
         from awx.main.utils import ignore_inventory_computed_fields
-        from awx.main.tasks import update_inventory_computed_fields
+        from awx.main.tasks.system import update_inventory_computed_fields
         from awx.main.signals import disable_activity_stream, activity_stream_delete
 
         def mark_actual():
@@ -1214,16 +1220,12 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
     def is_container_group_task(self):
         return bool(self.instance_group and self.instance_group.is_container_group)
 
-    @property
-    def can_run_containerized(self):
-        return True
-
     def _get_parent_field_name(self):
         return 'inventory_source'
 
     @classmethod
     def _get_task_class(cls):
-        from awx.main.tasks import RunInventoryUpdate
+        from awx.main.tasks.jobs import RunInventoryUpdate
 
         return RunInventoryUpdate
 
@@ -583,7 +583,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
 
     @classmethod
     def _get_task_class(cls):
-        from awx.main.tasks import RunJob
+        from awx.main.tasks.jobs import RunJob
 
         return RunJob
 
@@ -743,10 +743,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
             return "$hidden due to Ansible no_log flag$"
         return artifacts
 
-    @property
-    def can_run_containerized(self):
-        return True
-
     @property
     def is_container_group_task(self):
         return bool(self.instance_group and self.instance_group.is_container_group)
@@ -1217,7 +1213,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
 
     @classmethod
     def _get_task_class(cls):
-        from awx.main.tasks import RunSystemJob
+        from awx.main.tasks.jobs import RunSystemJob
 
         return RunSystemJob
 
@@ -1236,10 +1232,6 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
             return UnpartitionedSystemJobEvent
         return SystemJobEvent
 
-    @property
-    def can_run_on_control_plane(self):
-        return True
-
     @property
     def task_impact(self):
         return 5
@@ -9,6 +9,7 @@ from django.utils.translation import ugettext_lazy as _
 from awx.api.versioning import reverse
 from awx.main.models.base import CommonModelNameNotUnique
 from awx.main.models.unified_jobs import UnifiedJobTemplate, UnifiedJob
+from awx.main.models.inventory import Inventory
 
 __all__ = ('Label',)
 
@@ -35,15 +36,14 @@ class Label(CommonModelNameNotUnique):
 
     @staticmethod
     def get_orphaned_labels():
-        return Label.objects.filter(organization=None, unifiedjobtemplate_labels__isnull=True)
+        return Label.objects.filter(organization=None, unifiedjobtemplate_labels__isnull=True, inventory_labels__isnull=True)
 
     def is_detached(self):
-        return bool(Label.objects.filter(id=self.id, unifiedjob_labels__isnull=True, unifiedjobtemplate_labels__isnull=True).count())
+        return Label.objects.filter(id=self.id, unifiedjob_labels__isnull=True, unifiedjobtemplate_labels__isnull=True, inventory_labels__isnull=True).exists()
 
     def is_candidate_for_detach(self):
-
         c1 = UnifiedJob.objects.filter(labels__in=[self.id]).count()
         c2 = UnifiedJobTemplate.objects.filter(labels__in=[self.id]).count()
-        if (c1 + c2 - 1) == 0:
-            return True
-        else:
-            return False
+        c3 = Inventory.objects.filter(labels__in=[self.id]).count()
+        return (c1 + c2 + c3 - 1) == 0
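The "- 1" in the rewritten is_candidate_for_detach discounts the object the label is currently being detached from; with invented counts:

    # Invented counts: the label is attached to exactly one unified job and nothing else.
    c1, c2, c3 = 1, 0, 0
    # A total of exactly one means this detach would orphan the label.
    assert (c1 + c2 + c3 - 1) == 0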
@@ -508,7 +508,7 @@ class JobNotificationMixin(object):
         return (msg, body)
 
     def send_notification_templates(self, status):
-        from awx.main.tasks import send_notifications  # avoid circular import
+        from awx.main.tasks.system import send_notifications  # avoid circular import
 
         if status not in ['running', 'succeeded', 'failed']:
             raise ValueError(_("status must be either running, succeeded or failed"))
@@ -118,7 +118,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
         from awx.main.models import Credential
 
         public_galaxy_credential = Credential.objects.filter(managed=True, name='Ansible Galaxy').first()
-        if public_galaxy_credential not in self.galaxy_credentials.all():
+        if public_galaxy_credential is not None and public_galaxy_credential not in self.galaxy_credentials.all():
             self.galaxy_credentials.add(public_galaxy_credential)
 
 
@@ -471,7 +471,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
         r = super(Project, self).delete(*args, **kwargs)
         for path_to_delete in paths_to_delete:
             if self.scm_type and path_to_delete:  # non-manual, concrete path
-                from awx.main.tasks import delete_project_files
+                from awx.main.tasks.system import delete_project_files
 
                 delete_project_files.delay(path_to_delete)
         return r
@@ -532,7 +532,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
 
     @classmethod
     def _get_task_class(cls):
-        from awx.main.tasks import RunProjectUpdate
+        from awx.main.tasks.jobs import RunProjectUpdate
 
         return RunProjectUpdate
 
@@ -553,10 +553,6 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
         websocket_data.update(dict(project_id=self.project.id))
         return websocket_data
 
-    @property
-    def can_run_on_control_plane(self):
-        return True
-
     @property
     def event_class(self):
         if self.has_unpartitioned_events:
@@ -617,20 +613,6 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
     def get_notification_friendly_name(self):
         return "Project Update"
 
-    @property
-    def preferred_instance_groups(self):
-        if self.organization is not None:
-            organization_groups = [x for x in self.organization.instance_groups.all()]
-        else:
-            organization_groups = []
-        template_groups = [x for x in super(ProjectUpdate, self).preferred_instance_groups]
-        selected_groups = template_groups + organization_groups
-        if not any([not group.is_container_group for group in selected_groups]):
-            selected_groups = selected_groups + list(self.control_plane_instance_group)
-        if not selected_groups:
-            return self.global_instance_groups
-        return selected_groups
-
     def save(self, *args, **kwargs):
         added_update_fields = []
         if not self.job_tags:
@@ -36,21 +36,21 @@ from awx.main.dispatch import get_local_queuename
 from awx.main.dispatch.control import Control as ControlDispatcher
 from awx.main.registrar import activity_stream_registrar
 from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
-from awx.main.utils import (
+from awx.main.utils.common import (
     camelcase_to_underscore,
     get_model_for_type,
-    encrypt_dict,
-    decrypt_field,
     _inventory_updates,
     copy_model_by_class,
     copy_m2m_relationships,
     get_type_for_model,
     parse_yaml_or_json,
     getattr_dne,
-    polymorphic,
     schedule_task_manager,
     get_event_partition_epoch,
+    get_capacity_type,
 )
+from awx.main.utils.encryption import encrypt_dict, decrypt_field
+from awx.main.utils import polymorphic
 from awx.main.constants import ACTIVE_STATES, CAN_CANCEL
 from awx.main.redact import UriCleaner, REPLACE_STR
 from awx.main.consumers import emit_channel_notification
@@ -740,15 +740,8 @@ class UnifiedJob(
         raise NotImplementedError  # Implement in subclasses.
 
     @property
-    def can_run_on_control_plane(self):
-        if settings.IS_K8S:
-            return False
-
-        return True
-
-    @property
-    def can_run_containerized(self):
-        return False
+    def capacity_type(self):
+        return get_capacity_type(self)
 
     def _get_parent_field_name(self):
         return 'unified_job_template'  # Override in subclasses.
@@ -1053,7 +1046,7 @@ class UnifiedJob(
         fd = tempfile.NamedTemporaryFile(
             mode='w', prefix='{}-{}-'.format(self.model_to_str(), self.pk), suffix='.out', dir=settings.JOBOUTPUT_ROOT, encoding='utf-8'
         )
-        from awx.main.tasks import purge_old_stdout_files  # circular import
+        from awx.main.tasks.system import purge_old_stdout_files  # circular import
 
         purge_old_stdout_files.apply_async()
 
@@ -1442,9 +1435,13 @@ class UnifiedJob(
         if not settings.IS_K8S:
             default_instance_group_names.append(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME)
 
-        default_instance_groups = InstanceGroup.objects.filter(name__in=default_instance_group_names)
+        default_instance_groups = list(InstanceGroup.objects.filter(name__in=default_instance_group_names))
 
-        return list(default_instance_groups)
+        # assure deterministic precedence by making sure the default group is first
+        if (not settings.IS_K8S) and default_instance_groups and default_instance_groups[0].name != settings.DEFAULT_EXECUTION_QUEUE_NAME:
+            default_instance_groups.reverse()
+
+        return default_instance_groups
 
     def awx_meta_vars(self):
         """
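A sketch of the ordering fix, assuming the conventional queue names 'default' (DEFAULT_EXECUTION_QUEUE_NAME) and 'controlplane' (DEFAULT_CONTROL_PLANE_QUEUE_NAME); the initial list order is invented, since an unordered queryset gives no guarantee:

    groups = ['controlplane', 'default']   # invented filter() result order
    if groups and groups[0] != 'default':  # 'default' assumed as DEFAULT_EXECUTION_QUEUE_NAME
        groups.reverse()
    assert groups == ['default', 'controlplane']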
@@ -1500,7 +1497,12 @@ class UnifiedJob(
             return False
 
     def log_lifecycle(self, state, blocked_by=None):
-        extra = {'type': self._meta.model_name, 'task_id': self.id, 'state': state}
+        extra = {
+            'type': self._meta.model_name,
+            'task_id': self.id,
+            'state': state,
+            'work_unit_id': self.work_unit_id,
+        }
         if self.unified_job_template:
             extra["template_name"] = self.unified_job_template.name
         if state == "blocked" and blocked_by:
@@ -1509,6 +1511,11 @@ class UnifiedJob(
             extra["blocked_by"] = blocked_by_msg
         else:
             msg = f"{self._meta.model_name}-{self.id} {state.replace('_', ' ')}"
+
+        if state == "controller_node_chosen":
+            extra["controller_node"] = self.controller_node or "NOT_SET"
+        elif state == "execution_node_chosen":
+            extra["execution_node"] = self.execution_node or "NOT_SET"
         logger_job_lifecycle.debug(msg, extra=extra)
 
     @property
@@ -813,7 +813,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
         return True
 
     def send_approval_notification(self, approval_status):
-        from awx.main.tasks import send_notifications  # avoid circular import
+        from awx.main.tasks.system import send_notifications  # avoid circular import
 
         if self.workflow_job_template is None:
             return
@@ -9,6 +9,7 @@ from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.utils import get_awx_http_client_headers
 from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 logger = logging.getLogger('awx.main.notifications.rocketchat_backend')
@@ -38,7 +39,9 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
             if optvalue is not None:
                 payload[optval] = optvalue.strip()
 
-        r = requests.post("{}".format(m.recipients()[0]), data=json.dumps(payload), verify=(not self.rocketchat_no_verify_ssl))
+        r = requests.post(
+            "{}".format(m.recipients()[0]), data=json.dumps(payload), headers=get_awx_http_client_headers(), verify=(not self.rocketchat_no_verify_ssl)
+        )
 
         if r.status_code >= 400:
             logger.error(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code)))
@@ -2,7 +2,8 @@
 # All Rights Reserved.
 
 import logging
-from slackclient import SlackClient
+from slack_sdk import WebClient
+from slack_sdk.errors import SlackApiError
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
@@ -28,23 +29,30 @@ class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):
         self.color = hex_color
 
     def send_messages(self, messages):
-        connection = SlackClient(self.token)
+        client = WebClient(self.token)
         sent_messages = 0
         for m in messages:
             try:
                 for r in m.recipients():
                     if r.startswith('#'):
                         r = r[1:]
+                    thread = None
+                    channel = r
+                    thread = None
+                    if ',' in r:
+                        channel, thread = r.split(',')
                     if self.color:
-                        ret = connection.api_call("chat.postMessage", channel=r, as_user=True, attachments=[{"color": self.color, "text": m.subject}])
+                        response = client.chat_postMessage(
+                            channel=channel, thread_ts=thread, as_user=True, attachments=[{"color": self.color, "text": m.subject}]
+                        )
                     else:
-                        ret = connection.api_call("chat.postMessage", channel=r, as_user=True, text=m.subject)
-                    logger.debug(ret)
-                    if ret['ok']:
+                        response = client.chat_postMessage(channel=channel, thread_ts=thread, as_user=True, text=m.subject)
+                    logger.debug(response)
+                    if response['ok']:
                         sent_messages += 1
                     else:
-                        raise RuntimeError("Slack Notification unable to send {}: {} ({})".format(r, m.subject, ret['error']))
-            except Exception as e:
+                        raise RuntimeError("Slack Notification unable to send {}: {} ({})".format(r, m.subject, response['error']))
+            except SlackApiError as e:
                 logger.error(smart_text(_("Exception sending messages: {}").format(e)))
                 if not self.fail_silently:
                     raise
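A self-contained sketch of the slack_sdk calls the rewritten backend relies on; the token and channel are placeholders:

    from slack_sdk import WebClient
    from slack_sdk.errors import SlackApiError

    client = WebClient(token='xoxb-placeholder')
    try:
        # A recipient like '#alerts,1622100000.000100' is split into channel and thread_ts above.
        response = client.chat_postMessage(channel='alerts', thread_ts=None, text='job finished')
        assert response['ok']
    except SlackApiError as exc:
        print(exc.response['error'])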
@@ -72,7 +72,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
                 "{}".format(m.recipients()[0]),
                 auth=auth,
                 data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
-                headers=get_awx_http_client_headers(),
+                headers=dict(list(get_awx_http_client_headers().items()) + list((self.headers or {}).items())),
                 verify=(not self.disable_ssl_verification),
             )
         if r.status_code >= 400:
@@ -9,29 +9,12 @@ from kubernetes import client, config
 from django.utils.functional import cached_property
 from django.utils.translation import ugettext_lazy as _
 
-from awx.main.utils.common import parse_yaml_or_json
+from awx.main.utils.common import parse_yaml_or_json, deepmerge
 from awx.main.utils.execution_environments import get_default_pod_spec
 
 logger = logging.getLogger('awx.main.scheduler')
 
 
-def deepmerge(a, b):
-    """
-    Merge dict structures and return the result.
-
-    >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}}
-    >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}}
-    >>> import pprint; pprint.pprint(deepmerge(a, b))
-    {'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}}
-    """
-    if isinstance(a, dict) and isinstance(b, dict):
-        return dict([(k, deepmerge(a.get(k), b.get(k))) for k in set(a.keys()).union(b.keys())])
-    elif b is None:
-        return a
-    else:
-        return b
-
-
 class PodManager(object):
     def __init__(self, task=None):
         self.task = task
@@ -183,7 +166,7 @@ class PodManager(object):
         pod_spec_override = {}
         if self.task and self.task.instance_group.pod_spec_override:
             pod_spec_override = parse_yaml_or_json(self.task.instance_group.pod_spec_override)
-        pod_spec = {**default_pod_spec, **pod_spec_override}
+        pod_spec = deepmerge(default_pod_spec, pod_spec_override)
 
         if self.task:
             pod_spec['metadata'] = deepmerge(
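Why moving from the shallow {**a, **b} merge to deepmerge matters for pod_spec_override, shown with invented dicts (the deepmerge body is the one removed above, now living in awx.main.utils.common):

    def deepmerge(a, b):
        if isinstance(a, dict) and isinstance(b, dict):
            return dict([(k, deepmerge(a.get(k), b.get(k))) for k in set(a.keys()).union(b.keys())])
        elif b is None:
            return a
        return b

    default = {'metadata': {'namespace': 'awx', 'labels': {'app': 'awx'}}}
    override = {'metadata': {'labels': {'tier': 'ee'}}}

    assert {**default, **override}['metadata'] == {'labels': {'tier': 'ee'}}   # shallow merge drops namespace
    merged = deepmerge(default, override)
    assert merged['metadata']['namespace'] == 'awx'                            # nested keys survive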
Some files were not shown because too many files have changed in this diff.