Mirror of https://github.com/ansible/awx.git (synced 2026-02-11 14:44:44 -03:30)
Compare commits
1104 Commits
(Commit table omitted: 1104 commits, first 0736f4d166, last 2394c0cb3d. Only the abbreviated SHA1 column survived in this mirror; the author, date, and message columns were empty in every row.)

@@ -1,3 +1,2 @@
 awx/ui/node_modules
-awx/ui_next/node_modules
 Dockerfile

.github/CODEOWNERS (vendored, new file, 1 line)
@@ -0,0 +1 @@
+workflows/e2e_test.yml @tiagodread @shanemcd @jakemcdermott

.github/ISSUE_TEMPLATE/bug_report.md (vendored, 41 lines deleted)
@@ -1,41 +0,0 @@
----
-name: "\U0001F41B Bug report"
-about: Create a report to help us improve
-
----
-<!-- Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support, please use:
-
-- http://webchat.freenode.net/?channels=ansible-awx
-- https://groups.google.com/forum/#!forum/awx-project
-
-We have to limit this because of limited volunteer time to respond to issues! -->
-
-##### ISSUE TYPE
-- Bug Report
-
-##### SUMMARY
-<!-- Briefly describe the problem. -->
-
-##### ENVIRONMENT
-* AWX version: X.Y.Z
-* AWX install method: openshift, minishift, docker on linux, docker for mac, boot2docker
-* Ansible version: X.Y.Z
-* Operating System:
-* Web Browser:
-
-##### STEPS TO REPRODUCE
-
-<!-- Please describe exactly how to reproduce the problem. -->
-
-##### EXPECTED RESULTS
-
-<!-- What did you expect to happen when running the steps above? -->
-
-##### ACTUAL RESULTS
-
-<!-- What actually happened? -->
-
-##### ADDITIONAL INFORMATION
-
-<!-- Include any links to sosreport, database dumps, screenshots or other
-information. -->

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file, 135 lines; YAML indentation reconstructed, as the mirror flattened it)
@@ -0,0 +1,135 @@
+---
+name: Bug Report
+description: Create a report to help us improve
+labels:
+  - bug
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Issues are for **concrete, actionable bugs and feature requests** only. For debugging help or technical support, please use:
+        - The #ansible-awx channel on irc.libera.chat
+        - https://groups.google.com/forum/#!forum/awx-project
+
+  - type: checkboxes
+    id: terms
+    attributes:
+      label: Please confirm the following
+      options:
+        - label: I agree to follow this project's [code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
+          required: true
+        - label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
+          required: true
+        - label: I understand that AWX is open source software provided for free and that I am not entitled to status updates or other assurances.
+          required: true
+
+  - type: textarea
+    id: summary
+    attributes:
+      label: Summary
+      description: Briefly describe the problem.
+    validations:
+      required: false
+
+  - type: input
+    id: awx-version
+    attributes:
+      label: AWX version
+      description: What version of AWX are you running?
+    validations:
+      required: true
+
+  - type: dropdown
+    id: awx-install-method
+    attributes:
+      label: Installation method
+      description: How did you install AWX?
+      multiple: false
+      options:
+        - kubernetes
+        - minikube
+        - openshift
+        - minishift
+        - docker on linux
+        - docker for mac
+        - boot2docker
+    validations:
+      required: true
+
+  - type: dropdown
+    id: modified-architecture
+    attributes:
+      label: Modifications
+      description: >-
+        Have you modified the installation, deployment topology, or container images in any way? If yes, please
+        explain in the "additional information" field at the bottom of the form.
+      multiple: false
+      options:
+        - "no"
+        - "yes"
+    validations:
+      required: true
+
+  - type: input
+    id: ansible-version
+    attributes:
+      label: Ansible version
+      description: What version of Ansible are you running?
+    validations:
+      required: false
+
+  - type: input
+    id: operating-system
+    attributes:
+      label: Operating system
+      description: What operating system are you using?
+    validations:
+      required: false
+
+  - type: dropdown
+    id: browsers
+    attributes:
+      label: Web browser
+      description: Which browsers are affected?
+      multiple: true
+      options:
+        - Firefox
+        - Chrome
+        - Safari
+        - Edge
+    validations:
+      required: false
+
+  - type: textarea
+    id: steps-to-reproduce
+    attributes:
+      label: Steps to reproduce
+      description: >-
+        Starting from a new installation of the system, describe exactly how a developer or quality engineer can reproduce the bug
+        on infrastructure that isn't yours. Include any and all resources created, input values, test users, roles assigned, playbooks used, etc.
+    validations:
+      required: true
+
+  - type: textarea
+    id: expected-results
+    attributes:
+      label: Expected results
+      description: What did you expect to happpen when running the steps above?
+    validations:
+      required: true
+
+  - type: textarea
+    id: actual-results
+    attributes:
+      label: Actual results
+      description: What actually happened?
+    validations:
+      required: true
+
+  - type: textarea
+    id: additional-information
+    attributes:
+      label: Additional information
+      description: Include any relevant log output, links to sosreport, database dumps, screenshots, or other information.
+    validations:
+      required: false

.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 lines changed)
@@ -5,7 +5,7 @@ about: Suggest an idea for this project
 ---
 <!-- Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support, please use:
 
-- http://webchat.freenode.net/?channels=ansible-awx
+- http://web.libera.chat/?channels=#ansible-awx
 - https://groups.google.com/forum/#!forum/awx-project
 
 We have to limit this because of limited volunteer time to respond to issues! -->

.github/PULL_REQUEST_TEMPLATE.md (vendored, 8 lines added)
@@ -1,3 +1,11 @@
+<!--- changelog-entry
+# Fill in 'msg' below to have an entry automatically added to the next release changelog.
+# Leaving 'msg' blank will not generate a changelog entry for this PR.
+# Please ensure this is a simple (and readable) one-line string.
+---
+msg: ""
+-->
+
 ##### SUMMARY
 <!--- Describe the change, including rationale and design decisions -->
 
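
For illustration, here is how the new changelog-entry block would look once a PR author fills in `msg`; the message text below is a made-up example, not part of this changeset:

```
<!--- changelog-entry
---
msg: "Fixed a typo in the project sync error message"
-->
```

These one-line strings are presumably what the changelog generation step in the release workflow (see .github/workflows/release.yml below) collects into release notes.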

.github/workflows/ci.yml (vendored, new file, 177 lines; YAML indentation reconstructed)
@@ -0,0 +1,177 @@
+---
+name: CI
+env:
+  BRANCH: ${{ github.base_ref || 'devel' }}
+on:
+  pull_request:
+  push:
+    branches: [devel]
+jobs:
+  api-test:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Run API Tests
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} /start_tests.sh
+  api-lint:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Run API Linters
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} /var/lib/awx/venv/awx/bin/tox -e linters
+  api-swagger:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Generate API Reference
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} /start_tests.sh swagger
+  awx-collection:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Run Collection Tests
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} /start_tests.sh test_collection_all
+  api-schema:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Check API Schema
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} /start_tests.sh detect-schema-change
+  ui-lint:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Run UI Linters
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} make ui-lint
+  ui-test:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: Run UI Tests
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} make ui-test
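
Every job above repeats the same scaffold (registry login, cache-warming pull, image build) and differs only in the final command run inside the container. As a rough sketch, the api-test job can be reproduced locally from a checkout; `<owner>` is a placeholder for `github.repository_owner`, and this assumes Docker plus an authenticated ghcr.io login:

```sh
# Warm the build cache, rebuild the dev image, then run the API test suite
# inside it, mirroring the "api-test" job. <owner> is a placeholder.
docker pull ghcr.io/<owner>/awx_devel:devel
DEV_DOCKER_TAG_BASE=ghcr.io/<owner> COMPOSE_TAG=devel make docker-compose-build
docker run -u "$(id -u)" --rm -v "$PWD":/awx_devel/:Z \
  --workdir=/awx_devel ghcr.io/<owner>/awx_devel:devel /start_tests.sh
```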

.github/workflows/devel_image.yml (vendored, new file, 30 lines; YAML indentation reconstructed)
@@ -0,0 +1,30 @@
+---
+name: Push Development Image
+on:
+  push:
+    branches:
+      - devel
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+
+      - name: Push image
+        run: |
+          docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
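
Both this workflow and upload_schema.yml below derive the image tag from the pushed branch with `${GITHUB_REF##*/}`, a POSIX shell parameter expansion that removes the longest prefix matching `*/`. A minimal sketch of what it evaluates to:

```sh
#!/bin/sh
# For a push to the devel branch, GitHub Actions sets GITHUB_REF like this:
GITHUB_REF="refs/heads/devel"
# "##*/" deletes everything through the last "/", leaving the final segment.
echo "${GITHUB_REF##*/}"   # prints: devel
```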

.github/workflows/e2e_test.yml (vendored, new file, 100 lines; YAML indentation reconstructed)
@@ -0,0 +1,100 @@
+---
+name: E2E Tests
+on:
+  pull_request_target:
+    types: [labeled]
+jobs:
+  e2e-test:
+    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
+    runs-on: ubuntu-latest
+    timeout-minutes: 40
+    permissions:
+      packages: write
+      contents: read
+    strategy:
+      matrix:
+        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Install system deps
+        run: sudo apt-get install -y gettext
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+
+      - name: Build UI
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make ui-devel
+
+      - name: Start AWX
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make docker-compose &> make-docker-compose-output.log &
+
+      - name: Pull awx_cypress_base image
+        run: |
+          docker pull quay.io/awx/awx_cypress_base:latest
+
+      - name: Checkout test project
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ github.repository_owner }}/tower-qa
+          ssh-key: ${{ secrets.QA_REPO_KEY }}
+          path: tower-qa
+          ref: devel
+
+      - name: Build cypress
+        run: |
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          docker build -t awx-pf-tests .
+
+      - name: Update default AWX password
+        run: |
+          while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
+          do
+            echo "Waiting for AWX..."
+            sleep 5;
+          done
+          echo "AWX is up, updating the password..."
+          docker exec -i tools_awx_1 sh <<-EOSH
+            awx-manage update_password --username=admin --password=password
+          EOSH
+
+      - name: Run E2E tests
+        env:
+          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+        run: |
+          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
+          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
+          export COMMIT_INFO_SHA=$GITHUB_SHA
+          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
+          printenv > .env
+          echo "Executing tests:"
+          docker run \
+            --network '_sources_default' \
+            --ipc=host \
+            --env-file=.env \
+            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
+            -e CYPRESS_AWX_E2E_USERNAME=admin \
+            -e CYPRESS_AWX_E2E_PASSWORD='password' \
+            -e COMMAND="npm run cypress-gha" \
+            -v /dev/shm:/dev/shm \
+            -v $PWD:/e2e \
+            -w /e2e \
+            awx-pf-tests run --project .
+
+      - name: Save AWX logs
+        uses: actions/upload-artifact@v2
+        with:
+          name: AWX-logs-${{ matrix.job }}
+          path: make-docker-compose-output.log
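
The `if:` condition above means the 25-way matrix only runs on pull requests carrying the `qe:e2e` label. As a hypothetical example of applying that label from the command line with the GitHub CLI (assuming `gh` is installed and authenticated; the PR number is a placeholder):

```sh
# Apply the label that gates the E2E workflow; <pr-number> is a placeholder.
gh pr edit <pr-number> --add-label "qe:e2e"
```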

.github/workflows/release.yml (vendored, new file, 56 lines; YAML indentation reconstructed)
@@ -0,0 +1,56 @@
+name: Release AWX
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version'
+        required: true
+        default: ''
+      confirm:
+        description: 'Are you sure? Set this to yes.'
+        required: true
+        default: 'no'
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: "Verify inputs"
+        run: |
+          set -e
+
+          if [[ ${{ github.event.inputs.confirm }} != "yes" ]]; then
+            >&2 echo "Confirm must be 'yes'"
+            exit 1
+          fi
+
+          if [[ ${{ github.event.inputs.version }} == "" ]]; then
+            >&2 echo "Set version to continue."
+            exit 1
+          fi
+
+          exit 0
+
+      - name: Generate changelog
+        uses: shanemcd/simple-changelog-generator@v1
+        id: changelog
+        with:
+          repo: "${{ github.repository }}"
+
+      - name: Write changelog to file
+        run: |
+          cat << 'EOF' > /tmp/changelog
+          ${{ steps.changelog.outputs.changelog }}
+          EOF
+
+      - name: Release AWX
+        run: |
+          ansible-playbook -v tools/ansible/release.yml \
+            -e changelog_path=/tmp/changelog \
+            -e version=${{ github.event.inputs.version }} \
+            -e github_token=${{ secrets.GITHUB_TOKEN }} \
+            -e repo=${{ github.repository }}
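
Because the trigger is workflow_dispatch, a release has to be started by hand with the two required inputs. One way, sketched with the GitHub CLI (the version string is illustrative; `gh workflow run` passes workflow inputs via `-f`):

```sh
# Kick off the release workflow manually; 19.3.0 is only an example version.
gh workflow run release.yml -f version=19.3.0 -f confirm=yes
```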

.github/workflows/upload_schema.yml (vendored, new file, 43 lines; YAML indentation reconstructed)
@@ -0,0 +1,43 @@
+---
+name: Upload API Schema
+on:
+  push:
+    branches:
+      - devel
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+
+      - name: Generate API Schema
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
+
+      - name: Upload API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a 'src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=schema.json mode=put permission=public-read'
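
Since the upload sets `permission=public-read` on the `awx-public-ci-files` bucket, the generated schema should be retrievable anonymously. A sketch using the generic S3 URL form (an assumption; the exact hostname can vary with bucket region and configuration):

```sh
# Fetch the published schema; fails loudly (-f) if the object is not public.
curl -fO https://awx-public-ci-files.s3.amazonaws.com/schema.json
```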

.gitignore (vendored, 17 lines changed)
@@ -28,12 +28,12 @@ awx/ui/build_test
 awx/ui/client/languages
 awx/ui/templates/ui/index.html
 awx/ui/templates/ui/installing.html
-awx/ui_next/node_modules/
-awx/ui_next/src/locales/*/messages.js
-awx/ui_next/coverage/
-awx/ui_next/build
-awx/ui_next/.env.local
-awx/ui_next/instrumented
+awx/ui/node_modules/
+awx/ui/src/locales/*/messages.js
+awx/ui/coverage/
+awx/ui/build
+awx/ui/.env.local
+awx/ui/instrumented
 rsyslog.pid
 tools/prometheus/data
 tools/docker-compose/ansible/awx_dump.sql
@@ -41,6 +41,7 @@ tools/docker-compose/Dockerfile
 tools/docker-compose/_build
 tools/docker-compose/_sources
 tools/docker-compose/overrides/
+tools/docker-compose-minikube/_sources
 
 # Tower setup playbook testing
 setup/test/roles/postgresql
@@ -62,14 +63,12 @@ __pycache__
 /Gruntfile.js
 /Brocfile.js
 /bower.json
-/package.json
 /testem.yml
 **/coverage
 /.istanbul.yml
 **/node_modules/**
 /tmp
 **/npm-debug.log*
-**/package-lock.json
 
 # UI build flag files
 awx/ui/.deps_built
@@ -153,7 +152,7 @@ use_dev_supervisor.txt
 .idea/*
 *.unison.tmp
 *.#
-/awx/ui_next/.ui-built
+/awx/ui/.ui-built
 /Dockerfile
 /_build/
 /_build_kube_dev/

@@ -1,5 +1,6 @@
 ---
 ignore: |
+  .github
   .tox
   awx/main/tests/data/inventory/plugins/**
   # vault files
|||||||
477
CHANGELOG.md
477
CHANGELOG.md
@@ -1,478 +1,7 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.
|
**Note:** This file is deprecated and will be removed at some point in a future release.
|
||||||
|
|
||||||
# 19.2.0 (June 1, 2021)
|
Starting with AWX 20, release notes are published to [GitHub Releases](https://github.com/ansible/awx/releases).
|
||||||
- Fixed race condition that would sometimes cause jobs to error out at the very end of an otherwise successful run (https://github.com/ansible/receptor/pull/328)
|
|
||||||
- Fixes bug where users were unable to click on text next to checkboxes in modals (https://github.com/ansible/awx/pull/10279)
|
|
||||||
- Have the project update playbook warn if role/collection syncing is disabled. (https://github.com/ansible/awx/pull/10068)
|
|
||||||
- Move irc references to point to irc.libera.chat (https://github.com/ansible/awx/pull/10295)
|
|
||||||
- Fixes bug where activity stream changes were displaying as [object object] (https://github.com/ansible/awx/pull/10267)
|
|
||||||
- Update awxkit to enable export of Galaxy credentials associated to organizations (https://github.com/ansible/awx/pull/10271)
|
|
||||||
- Bump receptor and receptorctl versions to 1.0.0a2 (https://github.com/ansible/awx/pull/10261)
|
|
||||||
- Add the ability to disable local authentication (https://github.com/ansible/awx/pull/10102)
|
|
||||||
- Show error if no Execution Environment is found on project sync/job run (https://github.com/ansible/awx/pull/10183)
|
|
||||||
- Allow for editing and deleting managed_by_tower EEs from API/UI (https://github.com/ansible/awx/pull/10173)
|
|
||||||
|
|
||||||
|
For older release notes, see https://github.com/ansible/awx/blob/19.3.0/CHANGELOG.md.
|
||||||
# 19.1.0 (May 1, 2021)
|
|
||||||
|
|
||||||
- Custom inventory scripts have been removed from the API https://github.com/ansible/awx/pull/9822
|
|
||||||
- Old scripts can be exported via `awx-manage export_custom_scripts`
|
|
||||||
- Fixed a bug where ad-hoc commands targeted against multiple hosts would run against only 1 host https://github.com/ansible/awx/pull/9973
|
|
||||||
- AWX will now look for a top-level requirements.yml when installing collections / roles in project updates https://github.com/ansible/awx/pull/9945
|
|
||||||
- Improved error handling when Container Group pods fail to launch https://github.com/ansible/awx/pull/10025
|
|
||||||
- Added ability to set server-side password policies using Django's AUTH_PASSWORD_VALIDATORS setting https://github.com/ansible/awx/pull/9999
|
|
||||||
- Bumped versions of Ansible Runner & AWX EE https://github.com/ansible/awx/pull/10013
|
|
||||||
- If you have built any custom EEs on top of awx-ee 0.1.0, you will need to rebuild on top of 0.2.0.
|
|
||||||
- Remove legacy resource profiling code https://github.com/ansible/awx/pull/9883
|
|
||||||
|
|
||||||
# 19.0.0 (April 7, 2021)
|
|
||||||
|
|
||||||
- AWX now runs on Python 3.8 (https://github.com/ansible/awx/pull/8778/)
|
|
||||||
- Fixed inventories-from-projects when running in Kubernetes (https://github.com/ansible/awx/pull/9741)
|
|
||||||
- Fixed a bug where a slash was appended to invetory file paths in UI dropdown (https://github.com/ansible/awx/pull/9713)
|
|
||||||
- Fix a bug with large file parsing in project sync (https://github.com/ansible/awx/pull/9627)
|
|
||||||
- Fix k8s credentials that use a custom ca cert (https://github.com/ansible/awx/pull/9744)
|
|
||||||
- Fix a bug that allowed a user to attempt deleting a running job (https://github.com/ansible/awx/pull/9758)
|
|
||||||
- Fixed the Kubernetes Pod reaper to properly delete Pods launched by Receptor (https://github.com/ansible/awx/pull/9819)
|
|
||||||
- AWX Collection Modules: added ability to set instance groups for organization, job templates, and inventories. (https://github.com/ansible/awx/pull/9804)
|
|
||||||
- Fixed CSP violation errors on job details and job settings views (https://github.com/ansible/awx/pull/9818)
|
|
||||||
- Added support for convergence any/all on workflow nodes (https://github.com/ansible/awx/pull/9737)
|
|
||||||
- Fixed race condition that causes InvalidGitRepositoryError (https://github.com/ansible/awx/pull/9754)
|
|
||||||
- Added support for Execution Environments to the Activity Stream (https://github.com/ansible/awx/issues/9308)
|
|
||||||
- Fixed a bug that improperly formats OpenSSH keys specified in custom Credential Types (https://github.com/ansible/awx/issues/9361)
|
|
||||||
- Fixed an HTTP 500 error for unauthenticated users (https://github.com/ansible/awx/pull/9725)
|
|
||||||
- Added subscription wizard: https://github.com/ansible/awx/pull/9496
|
|
||||||
|
|
||||||
# 18.0.0 (March 23, 2021)
|
|
||||||
|
|
||||||
**IMPORTANT INSTALL AND UPGRADE NOTES**
|
|
||||||
|
|
||||||
Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX: https://github.com/ansible/awx/blob/devel/INSTALL.md#installing-awx
|
|
||||||
|
|
||||||
If you have a pre-existing installation of AWX that utilizes the Docker-based installation method, this install method has ** notably changed** from 17.x to 18.x. For details, please see:
|
|
||||||
|
|
||||||
- https://groups.google.com/g/awx-project/c/47MjWSUQaOc/m/bCjSDn0eBQAJ
|
|
||||||
- https://github.com/ansible/awx/blob/devel/tools/docker-compose
|
|
||||||
- https://github.com/ansible/awx/blob/devel/tools/docker-compose/docs/data_migration.md
|
|
||||||
|
|
||||||
### Introducing Execution Environments
|
|
||||||
|
|
||||||
After a herculean effort from a number of contributors, we're excited to announce that AWX 18.0.0 introduces a new concept called Execution Environments.
|
|
||||||
|
|
||||||
Execution Environments are container images which consist of everything necessary to run a playbook within AWX, and which drive the entire management and lifecycle of playbook execution runtime in AWX: https://github.com/ansible/awx/issues/5157. This means that going forward, AWX no longer utilizes the [bubblewrap](https://github.com/containers/bubblewrap) project for playbook isolation, but instead utilizes a container per playbook run.
|
|
||||||
|
|
||||||
Much like custom virtualenvs, custom Execution Environments can be crafted to specify additional Python or system-level dependencies. [Ansible Builder](https://github.com/ansible/ansible-builder) outputs images you can upload to your registry which can *then* be defined in AWX and utilized for playbook runs.
|
|
||||||
|
|
||||||
To learn more about Ansible Builder and Execution Environments, see: https://www.ansible.com/blog/introduction-to-ansible-builder
|
|
||||||
|
|
||||||
### Other Notable Changes

- Removed the `installer` directory.
- The Kubernetes installer has been removed in favor of [AWX Operator](https://github.com/ansible/awx-operator). Official images for Operator-based installs are no longer hosted on Docker Hub, but are instead available on [Quay](https://quay.io/repository/ansible/awx?tab=tags).
- The "Local Docker" install method has been removed in favor of the development environment. Details can be found at: https://github.com/ansible/awx/blob/devel/tools/docker-compose/README.md
- Removed custom virtual environments: https://github.com/ansible/awx/pull/9498
- Custom virtual environments have been replaced by Execution Environments: https://github.com/ansible/awx/pull/9570
- The default Container Group Pod definition has changed. All custom Pod specs have been reset. https://github.com/ansible/awx/commit/05ef51f710dad8f8036bc5acee4097db4adc0d71
- Added user interface for the activity stream: https://github.com/ansible/awx/pull/9083
- Converted many of the top-level list views (Jobs, Teams, Hosts, Inventories, Projects, and more) to a new, permanent table component for substantially increased responsiveness, usability, maintainability, and other 'ility's: https://github.com/ansible/awx/pull/8970, https://github.com/ansible/awx/pull/9182 and many others!
- Added support for Centrify Vault (https://www.centrify.com) as a credential lookup plugin (https://github.com/ansible/awx/pull/9542)
- Added support for namespaces in the Hashicorp Vault credential plugin (https://github.com/ansible/awx/pull/9590)
- Added click-to-expand details for job tables
- Added search filtering to job output: https://github.com/ansible/awx/pull/9208
- Added the new migration, update, and "installation in progress" page: https://github.com/ansible/awx/pull/9123
- Added the user interface for job settings: https://github.com/ansible/awx/pull/8661
- Runtime errors from jobs are now displayed, along with an explanation of what went wrong, on the output page: https://github.com/ansible/awx/pull/8726
- You can now cancel a running job from its output and details panel: https://github.com/ansible/awx/pull/9199
- Fixed a bug where launch prompt inputs were unexpectedly deposited in the URL: https://github.com/ansible/awx/pull/9231
- Playbook, credential type, and inventory file inputs now support type-ahead and manual type-in! https://github.com/ansible/awx/pull/9120
- Added the ability to relaunch against failed hosts: https://github.com/ansible/awx/pull/9225
- Added pending workflow approval count to the application header: https://github.com/ansible/awx/pull/9334
- Added user interface for management jobs: https://github.com/ansible/awx/pull/9224
- Added a toast message showing notification template test results on the notification templates list: https://github.com/ansible/awx/pull/9318
- Replaced CodeMirror with AceEditor for editing template variables and notification templates: https://github.com/ansible/awx/pull/9281
- Added support for filtering and pagination on job output: https://github.com/ansible/awx/pull/9208
- Added support for HTML in custom login text: https://github.com/ansible/awx/pull/9519

## 17.1.0 (March 9, 2021)

- Addressed a security issue in AWX (CVE-2021-20253)
- Fixed a permissions error related to redis in K8S-based deployments: https://github.com/ansible/awx/issues/9401

## 17.0.1 (January 26, 2021)

- Fixed a pgdocker directory permissions issue with the Local Docker installer: https://github.com/ansible/awx/pull/9152
- Fixed a bug in the UI which caused toggle settings to not be changed when clicked: https://github.com/ansible/awx/pull/9093

## 17.0.0 (January 22, 2021)

- AWX now requires PostgreSQL 12 by default: https://github.com/ansible/awx/pull/8943

  **Note:** users who encounter permissions errors at upgrade time should `chown -R` the `~/.awx/pgdocker` directory to ensure it's owned by the user running the install playbook

- Added support for region name for OpenStack inventory: https://github.com/ansible/awx/issues/5080
- Added the ability to chain undefined attributes in custom notification templates: https://github.com/ansible/awx/issues/8677
- Dramatically simplified the `image_build` role: https://github.com/ansible/awx/pull/8980
- Fixed a bug which can cause schema migrations to fail at install time: https://github.com/ansible/awx/issues/9077
- Fixed a bug which caused the `is_superuser` user property to be out of date in certain circumstances: https://github.com/ansible/awx/pull/8833
- Fixed a bug which sometimes resulted in race conditions on settings access: https://github.com/ansible/awx/pull/8580
- Fixed a bug which sometimes caused an unexpected delay in stdout for some playbooks: https://github.com/ansible/awx/issues/9085
- (UI) Added support for credential password prompting on job launch: https://github.com/ansible/awx/pull/9028
- (UI) Added the ability to configure LDAP settings in the UI: https://github.com/ansible/awx/issues/8291
- (UI) Added a sync button to the Project detail view: https://github.com/ansible/awx/issues/8847
- (UI) Added a form for configuring Google OAuth 2.0 settings: https://github.com/ansible/awx/pull/8762
- (UI) Added searchable keys and related keys to the Credentials list: https://github.com/ansible/awx/issues/8603
- (UI) Added support for advanced search and copying to Notification Templates: https://github.com/ansible/awx/issues/7879
- (UI) Added support for prompting on workflow nodes: https://github.com/ansible/awx/issues/5913
- (UI) Added support for session timeouts: https://github.com/ansible/awx/pull/8250
- (UI) Fixed a bug that broke websocket streaming for the insecure ws:// protocol: https://github.com/ansible/awx/pull/8877
- (UI) Fixed a bug in the user interface when a translation for the browser's preferred locale isn't available: https://github.com/ansible/awx/issues/8884
- (UI) Fixed a bug where navigating from one survey question form directly to another wasn't reloading the form: https://github.com/ansible/awx/issues/7522
- (UI) Fixed a bug which can cause an uncaught error while launching a Job Template: https://github.com/ansible/awx/issues/8936
- Updated autobahn to address CVE-2020-35678

## 16.0.0 (December 10, 2020)

- AWX now ships with a reimagined user interface. **Please read this before upgrading:** https://groups.google.com/g/awx-project/c/KuT5Ao92HWo
- Removed support for syncing inventory from Red Hat CloudForms - https://github.com/ansible/awx/commit/0b701b3b2
- Removed support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
- Upgraded NodeJS to actively maintained LTS 14.15.1 - https://github.com/ansible/awx/pull/8766
- Added Git-LFS to the default image build - https://github.com/ansible/awx/pull/8700
- Added the ability to specify `metadata.labels` in the podspec for container groups - https://github.com/ansible/awx/issues/8486
- Added support for Kubernetes pod annotations - https://github.com/ansible/awx/pull/8434
- Added the ability to label the web container in local Docker installs - https://github.com/ansible/awx/pull/8449
- Added additional metadata (as an extra var) to playbook runs to report the SCM branch name - https://github.com/ansible/awx/pull/8433
- Fixed a bug that caused k8s installations to fail due to an incorrect Helm repo - https://github.com/ansible/awx/issues/8715
- Fixed a bug that prevented certain Workflow Approval resources from being deleted - https://github.com/ansible/awx/pull/8612
- Fixed a bug that prevented the deletion of inventories stuck in "pending deletion" state - https://github.com/ansible/awx/issues/8525
- Fixed a display bug in webhook notifications with certain unicode characters - https://github.com/ansible/awx/issues/7400
- Improved support for exporting dependent objects (Inventory Hosts and Groups) in the `awx export` CLI tool - https://github.com/ansible/awx/commit/607bc0788

## 15.0.1 (October 20, 2020)

- Added several optimizations to improve performance for a variety of high-load simultaneous job launch use cases - https://github.com/ansible/awx/pull/8403
- Added the ability to source roles and collections from requirements.yaml files (not just requirements.yml) - https://github.com/ansible/awx/issues/4540
- awx.awx collection modules now provide a clearer error message for incompatible versions of awxkit - https://github.com/ansible/awx/issues/8127
- Fixed a bug in notification messages that contain certain unicode characters - https://github.com/ansible/awx/issues/7400
- Fixed a bug that prevented the deletion of Workflow Approval records - https://github.com/ansible/awx/issues/8305
- Fixed a bug that broke the selection of webhook credentials - https://github.com/ansible/awx/issues/7892
- Fixed a bug which could cause confusing behavior for social auth logins across distinct browser tabs - https://github.com/ansible/awx/issues/8154
- Fixed several bugs in the output of Workflow Job Templates using the `awx export` tool - https://github.com/ansible/awx/issues/7798 https://github.com/ansible/awx/pull/7847
- Fixed a race condition that could lead to missing hosts when running parallel inventory syncs - https://github.com/ansible/awx/issues/5571
- Fixed an HTTP 500 error when certain LDAP group parameters aren't properly set - https://github.com/ansible/awx/issues/7622
- Updated a few dependencies in response to several CVEs:
  * CVE-2020-7720
  * CVE-2020-7743
  * CVE-2020-7676

## 15.0.0 (September 30, 2020)

- Added improved support for fetching Ansible collections from private Galaxy content sources (such as https://github.com/ansible/galaxy_ng) - https://github.com/ansible/awx/issues/7813

  **Note:** as part of this change, new Organizations created in the AWX API will _no longer_ automatically synchronize roles and collections from galaxy.ansible.com by default. More details on this change can be found at: https://github.com/ansible/awx/issues/8341#issuecomment-707310633

- AWX now utilizes a version of certifi that auto-discovers certificates in the system certificate store - https://github.com/ansible/awx/pull/8242
- Added support for arbitrary custom inventory plugin configuration: https://github.com/ansible/awx/issues/5150
- Added an optional setting to disable the auto-creation of organizations and teams on successful SAML login - https://github.com/ansible/awx/pull/8069
- Added a number of optimizations to AWX's callback receiver to improve the speed of stdout processing for simultaneous playbook runs - https://github.com/ansible/awx/pull/8193 https://github.com/ansible/awx/pull/8191
- Added the ability to use `!include` and `!import` constructors when constructing YAML for use with the AWX CLI (see the sketch at the end of this section) - https://github.com/ansible/awx/issues/8135
- Fixed a bug that prevented certain users from being able to edit approval nodes in Workflows - https://github.com/ansible/awx/pull/8253
- Fixed a bug that broke password prompting for credentials in certain cases - https://github.com/ansible/awx/issues/8202
- Fixed a bug which could cause PostgreSQL deadlocks when running many parallel playbooks against large shared inventories - https://github.com/ansible/awx/issues/8145
- Fixed a bug which could cause delays in AWX's task manager when large numbers of simultaneous jobs are scheduled - https://github.com/ansible/awx/issues/7655
- Fixed a bug which could cause certain scheduled jobs - those that run every X minute(s) or hour(s) - to fail to run at the proper time - https://github.com/ansible/awx/issues/8071
- Fixed a performance issue for playbooks that store large amounts of data using the `set_stats` module - https://github.com/ansible/awx/issues/8006
- Fixed a bug related to AWX's handling of the auth_path argument for the HashiVault KeyValue credential plugin - https://github.com/ansible/awx/pull/7991
- Fixed a bug that broke support for Remote Archive SCM Type project syncs on platforms that utilize Python 2 - https://github.com/ansible/awx/pull/8057
- Updated to the latest version of Django Rest Framework to address CVE-2020-25626
- Updated to the latest version of Django to address CVE-2020-24583 and CVE-2020-24584
- Updated to the latest version of channels_redis to address a bug that slowly causes Daphne processes to leak memory over time - https://github.com/django/channels_redis/issues/212

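For the `!include` / `!import` constructors mentioned above, here is a minimal sketch of splitting an import file in two; the file names and the heavily abbreviated job template body are illustrative only, not taken from the release notes:

```bash
# Illustrative only: `awx import` reads YAML from stdin, and the
# !include constructor pulls another file in at that point in the document.
cat > jt-fragment.yml <<'EOF'
name: Example Job Template
job_type: run
EOF

cat > resources.yml <<'EOF'
job_templates:
  - !include jt-fragment.yml
EOF

awx import < resources.yml
```
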
## 14.1.0 (Aug 25, 2020)

- AWX images can now be built on ARM64 - https://github.com/ansible/awx/pull/7607
- Added the Remote Archive SCM Type to support using immutable artifacts and releases (such as tarballs and zip files) as projects - https://github.com/ansible/awx/issues/7954
- Deprecated official support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
- Added resource import/export support to the official AWX collection - https://github.com/ansible/awx/issues/7329
- Added the ability to import YAML-based resources (instead of just JSON) when using the AWX CLI - https://github.com/ansible/awx/pull/7808
- Users upgrading from older versions of AWX may encounter an issue that causes their postgres container to restart in a loop (https://github.com/ansible/awx/issues/7854) - if you encounter this, bring your containers down and then back up (e.g., `docker-compose down && docker-compose up -d`) after upgrading to 14.1.0.
- Updated the AWX CLI to export labels associated with Workflow Job Templates - https://github.com/ansible/awx/pull/7847
- Updated to the latest python-ldap to address a bug - https://github.com/ansible/awx/issues/7868
- Upgraded git-python to fix a bug that caused workflows to sometimes fail - https://github.com/ansible/awx/issues/6119
- Worked around a bug in the channels_redis library that slowly causes Daphne processes to leak memory over time - https://github.com/django/channels_redis/issues/212
- Fixed a bug in the AWX CLI that prevented Workflow nodes from importing properly - https://github.com/ansible/awx/issues/7793
- Fixed a bug in the awx.awx collection release process that templated the wrong version - https://github.com/ansible/awx/issues/7870
- Fixed a bug that caused errors rendering stdout that contained UTF-16 surrogate pairs - https://github.com/ansible/awx/pull/7918

## 14.0.0 (Aug 6, 2020)

- As part of our commitment to inclusivity in open source, we recently took some time to audit AWX's source code and user interface and replace certain terminology with more inclusive language. Strictly speaking, this isn't a bug or a feature, but we think it's important and worth calling attention to:
  * https://github.com/ansible/awx/commit/78229f58715fbfbf88177e54031f532543b57acc
  * https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language
- Installing roles and collections via requirements.yml as part of Project Updates now requires at least Ansible 2.9 - https://github.com/ansible/awx/issues/7769
- Deprecated the use of the `PRIMARY_GALAXY_USERNAME` and `PRIMARY_GALAXY_PASSWORD` settings. We recommend using tokens to access Galaxy or Automation Hub.
- Added local caching for downloaded roles and collections so they are not re-downloaded on nodes where they are up to date with the project - https://github.com/ansible/awx/issues/5518
- Added the ability to associate K8S/OpenShift credentials to Job Templates for playbook interaction with the `community.kubernetes` collection - https://github.com/ansible/awx/issues/5735
- Added the ability to include HTML in the Custom Login Info presented on the login page - https://github.com/ansible/awx/issues/7600
- Fixed https://access.redhat.com/security/cve/cve-2020-14327 - Server-side request forgery on credentials
- Fixed https://access.redhat.com/security/cve/cve-2020-14328 - Server-side request forgery on webhooks
- Fixed https://access.redhat.com/security/cve/cve-2020-14329 - Sensitive data exposure on labels
- Fixed https://access.redhat.com/security/cve/cve-2020-14337 - Named URLs allow for testing the presence or absence of objects
- Fixed a number of bugs in the user interface related to an upgrade of jQuery:
  * https://github.com/ansible/awx/issues/7530
  * https://github.com/ansible/awx/issues/7546
  * https://github.com/ansible/awx/issues/7534
  * https://github.com/ansible/awx/issues/7606
- Fixed a bug that caused the `-f yaml` flag of the AWX CLI to not print properly formatted YAML - https://github.com/ansible/awx/issues/7795
- Fixed a bug in the installer that caused errors when `docker_registry_password` was set - https://github.com/ansible/awx/issues/7695
- Fixed a permissions error that prevented certain users from starting AWX services - https://github.com/ansible/awx/issues/7545
- Fixed a bug that allowed superusers to run unsafe Jinja code when defining custom Credential Types - https://github.com/ansible/awx/pull/7584/
- Fixed a bug that prevented users from creating (or editing) custom Credential Types containing boolean fields - https://github.com/ansible/awx/issues/7483
- Fixed a bug that prevented users with postgres usernames containing uppercase letters from restoring backups successfully - https://github.com/ansible/awx/pull/7519
- Fixed a bug which allowed the creation (in the Tower API) of Groups and Hosts with the same name - https://github.com/ansible/awx/issues/4680

## 13.0.0 (Jun 23, 2020)

- Added import and export commands to the official AWX CLI, replacing send and receive from the old tower-cli (https://github.com/ansible/awx/pull/6125); see the example at the end of this section.
- Removed scripts as a means of running inventory updates of built-in types (https://github.com/ansible/awx/pull/6911)
- Ansible 2.8 is now partially unsupported; some inventory source types are known to no longer work.
- Fixed an issue where the vmware inventory source `ssl_verify` source variable was not recognized (https://github.com/ansible/awx/pull/7360)
- Fixed a bug that caused redis' listen socket to have too-permissive file permissions (https://github.com/ansible/awx/pull/7317)
- Fixed a bug that caused rsyslogd's configuration file to have world-readable file permissions, potentially leaking secrets (CVE-2020-10782)

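A rough round-trip with the new import/export commands might look like the following; the target host URL is a placeholder, and working credentials or tokens for both hosts are assumed to be configured:

```bash
# Export everything the current user can see to a JSON file, then load
# it into another AWX instance. The target URL below is a placeholder.
awx export > resources.json
awx --conf.host https://other-awx.example.org import < resources.json
```
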
## 12.0.0 (Jun 9, 2020)

- Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240)
- Moved to a single container image build instead of separate awx_web and awx_task images. The container image is just `awx` (https://github.com/ansible/awx/pull/7228)
- Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017)
- Removed support for HipChat notifications ([EoL announcement](https://www.atlassian.com/partnerships/slack/faq#faq-98b17ca3-247f-423b-9a78-70a91681eff0)); all previously-created HipChat notification templates will be deleted due to this removal.
- Fixed a bug which broke AWX installations with oc version 4.3 (https://github.com/ansible/awx/pull/6948/)
- Fixed a performance issue that caused notable delay of stdout processing for playbooks run against large numbers of hosts (https://github.com/ansible/awx/issues/6991)
- Fixed a bug that caused CyberArk AIM credential plugin lookups to hang forever in some environments (https://github.com/ansible/awx/issues/6986)
- Fixed a bug that caused ANY/ALL convergence settings not to properly save when editing approval nodes in the UI (https://github.com/ansible/awx/issues/6998)
- Fixed a bug that broke support for the `satellite6_group_prefix` source variable (https://github.com/ansible/awx/issues/7031)
- Fixed a bug that prevented changes to workflow node convergence settings when approval nodes were in use (https://github.com/ansible/awx/issues/7063)
- Fixed a bug that caused notifications to fail on newer versions of Mattermost (https://github.com/ansible/awx/issues/7264)
- Fixed a bug (by upgrading to 0.8.1 of the foreman collection) that prevented host_filters from working properly with Foreman-based inventory (https://github.com/ansible/awx/issues/7225)
- Fixed a bug that prevented the usage of the Conjur credential plugin with secrets that contain spaces (https://github.com/ansible/awx/issues/7191)
- Fixed a bug in `awx-manage run_wsbroadcast --status` in kubernetes (https://github.com/ansible/awx/pull/7009)
- Fixed a bug that broke notification toggles for system jobs in the UI (https://github.com/ansible/awx/pull/7042)
- Fixed a bug that broke local pip installs of awxkit (https://github.com/ansible/awx/issues/7107)
- Fixed a bug that prevented PagerDuty notifications from sending for workflow job template approvals (https://github.com/ansible/awx/issues/7094)
- Fixed a bug that broke external log aggregation support for URL paths that include the = character (such as the tokens for SumoLogic) (https://github.com/ansible/awx/issues/7139)
- Fixed a bug that prevented organization admins from removing labels from workflow job templates (https://github.com/ansible/awx/pull/7143)

## 11.2.0 (Apr 29, 2020)

- Inventory updates now use collection-based plugins by default (in Ansible 2.9+):
  - amazon.aws.aws_ec2
  - community.vmware.vmware_vm_inventory
  - azure.azcollection.azure_rm
  - google.cloud.gcp_compute
  - theforeman.foreman.foreman
  - openstack.cloud.openstack
  - ovirt.ovirt_collection.ovirt
  - awx.awx.tower
- Added support for Approle and LDAP/AD mechanisms to the Hashicorp Vault credential plugin (https://github.com/ansible/awx/issues/5076)
- Added Project (Domain Name) support for the OpenStack Keystone v3 API (https://github.com/ansible/awx/issues/6831)
- Added a new setting for raising log verbosity for rsyslogd (https://github.com/ansible/awx/pull/6818)
- Added the ability to monitor stdout in the CLI for running jobs and workflow jobs (https://github.com/ansible/awx/issues/6165)
- Fixed a bug which prevented the AWX CLI from properly installing with newer versions of pip (https://github.com/ansible/awx/issues/6870)
- Fixed a bug which broke AWX's external logging support when configured with HTTPS endpoints that utilize self-signed certificates (https://github.com/ansible/awx/issues/6851)
- Fixed a local docker installer bug that mistakenly attempted to upgrade PostgreSQL when an external pg_hostname is specified (https://github.com/ansible/awx/pull/5398)
- Fixed a race condition that caused task container crashes when pods are quickly brought down and back up (https://github.com/ansible/awx/issues/6750)
- Fixed a bug that caused 404 errors when attempting to view the second page of the workflow approvals view (https://github.com/ansible/awx/issues/6803)
- Fixed a bug that prevented the use of ANSIBLE_SSH_ARGS for ad-hoc commands (https://github.com/ansible/awx/pull/6811)
- Fixed a bug that broke AWX installs/upgrades on Red Hat OpenShift (https://github.com/ansible/awx/issues/6791)

## 11.1.0 (Apr 22, 2020)

- Changed rsyslogd to persist queued events to disk (to prevent a risk of out-of-memory errors) (https://github.com/ansible/awx/issues/6746)
- Added the ability to configure the destination and maximum disk size of the rsyslogd spool, used in the event of a log aggregator outage (https://github.com/ansible/awx/pull/6763); see the sketch at the end of this section
- Added the ability to discover playbooks in project clones from symlinked directories (https://github.com/ansible/awx/pull/6773)
- Fixed a bug that caused certain log aggregator settings to break logging integration (https://github.com/ansible/awx/issues/6760)
- Fixed a bug that caused playbook execution in container groups to sometimes unexpectedly deadlock (https://github.com/ansible/awx/issues/6692)
- Improved stability of the new redis clustering implementation (https://github.com/ansible/awx/pull/6739 https://github.com/ansible/awx/pull/6720)
- Improved stability of the new rsyslogd-based logging implementation (https://github.com/ansible/awx/pull/6796)

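A minimal sketch of tuning the spool via the CLI; the two setting names here are assumptions based on this change, so verify them with `awx settings list` before relying on this:

```bash
# Assumed setting names (check `awx settings list`): cap the rsyslogd
# spool at 1 GB and place it under /var/lib/awx.
awx settings modify LOG_AGGREGATOR_MAX_DISK_USAGE_GB 1
awx settings modify LOG_AGGREGATOR_MAX_DISK_USAGE_PATH /var/lib/awx
```
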
## 11.0.0 (Apr 16, 2020)

- As of AWX 11.0.0, Kubernetes-based deployments use a Deployment rather than a StatefulSet.
- Reimplemented external logging support using rsyslogd to improve reliability and address a number of issues (https://github.com/ansible/awx/issues/5155)
- Changed activity stream logs to include summary fields for related objects (https://github.com/ansible/awx/issues/1761)
- Added code to more gracefully attempt to reconnect to redis if it restarts/becomes unavailable (https://github.com/ansible/awx/pull/6670)
- Fixed a bug that caused REFRESH_TOKEN_EXPIRE_SECONDS to not properly be respected for OAuth2.0 refresh tokens generated by AWX (https://github.com/ansible/awx/issues/6630)
- Fixed a bug that broke schedules containing RRULEs with very old DTSTART dates (https://github.com/ansible/awx/pull/6550)
- Fixed a bug that broke installs on older versions of Ansible packaged with certain Linux distributions (https://github.com/ansible/awx/issues/5501)
- Fixed a bug that caused the activity stream to sometimes report the incorrect actor when associating user membership on SAML login (https://github.com/ansible/awx/pull/6525)
- Fixed a bug in AWX's Grafana notification support when annotation tags are omitted (https://github.com/ansible/awx/issues/6580)
- Fixed a bug that prevented some users from searching for Source Control credentials in the AWX user interface (https://github.com/ansible/awx/issues/6600)
- Fixed a bug that prevented disassociating orphaned users from credentials (https://github.com/ansible/awx/pull/6554)
- Updated Twisted to address CVE-2020-10108 and CVE-2020-10109.

## 10.0.0 (Mar 30, 2020)

- As of AWX 10.0.0, the official AWX CLI no longer supports Python 2 (it requires at least Python 3.6) (https://github.com/ansible/awx/pull/6327)
- AWX no longer relies on RabbitMQ; Redis is added as a new dependency (https://github.com/ansible/awx/issues/5443)
- Altered AWX's event tables to allow more than ~2 billion total events (https://github.com/ansible/awx/issues/6010)
- Improved the performance (time to execute, and memory consumption) of the periodic job cleanup system job (https://github.com/ansible/awx/pull/6166)
- Updated Job Templates so they now have an explicit Organization field (it is no longer inferred from the associated Project) (https://github.com/ansible/awx/issues/3903)
- Updated social-auth-core to address an upcoming GitHub API deprecation (https://github.com/ansible/awx/issues/5970)
- Updated to ansible-runner 1.4.6 to address various bugs.
- Updated Django to address CVE-2020-9402
- Updated the pyyaml version to address CVE-2017-18342
- Fixed a bug which prevented the new `scm_branch` field from being used in custom notification templates (https://github.com/ansible/awx/issues/6258)
- Fixed a race condition that sometimes caused success/failure notifications to include an incomplete list of hosts (https://github.com/ansible/awx/pull/6290)
- Fixed a bug that could cause certain settings pages to lose unsaved form edits when a playbook is launched (https://github.com/ansible/awx/issues/5265)
- Fixed a bug that could prevent the "Use TLS/SSL" field from properly saving when editing email notification templates (https://github.com/ansible/awx/issues/6383)
- Fixed a race condition that sometimes broke event/stdout processing for jobs launched in container groups (https://github.com/ansible/awx/issues/6280)

## 9.3.0 (Mar 12, 2020)

- Added the ability to specify an OAuth2 token description in the AWX CLI (https://github.com/ansible/awx/issues/6122)
- Added support for K8S service account annotations to the installer (https://github.com/ansible/awx/pull/6007)
- Added support for K8S imagePullSecrets to the installer (https://github.com/ansible/awx/pull/5989)
- Launching jobs (and workflows) using the `--monitor` flag in the AWX CLI now returns a non-zero exit code on job failure (https://github.com/ansible/awx/issues/5920); see the example at the end of this section
- Improved UI performance for various job views when many simultaneous users are logged into AWX (https://github.com/ansible/awx/issues/5883)
- Updated to the latest version of Django to address a few open CVEs (https://github.com/ansible/awx/pull/6080)
- Fixed a critical bug which could cause AWX to hang and stop launching playbooks after a period of time (https://github.com/ansible/awx/issues/5617)
- Fixed a bug which caused delays in project update stdout for certain large SCM clones (as of Ansible 2.9+) (https://github.com/ansible/awx/pull/6254)
- Fixed a bug which caused certain smart inventory filters to mistakenly return duplicate hosts (https://github.com/ansible/awx/pull/5972)
- Fixed an unclear server error when creating smart inventories with the AWX collection (https://github.com/ansible/awx/issues/6250)
- Fixed a bug that broke Grafana notification support (https://github.com/ansible/awx/issues/6137)
- Fixed a UI bug which prevented users with read access to an organization from editing credentials for that organization (https://github.com/ansible/awx/pull/6241)
- Fixed a bug which prevented workflow approval records from recording a `started` and `elapsed` date (https://github.com/ansible/awx/issues/6202)
- Fixed a bug which caused workflow nodes to have a confusing option for `verbosity` (https://github.com/ansible/awx/issues/6196)
- Fixed an RBAC bug which prevented projects and inventory schedules from being created by certain users in certain contexts (https://github.com/ansible/awx/issues/5717)
- Fixed a bug that caused `role_path` in a project's config to not be respected due to an error processing `/etc/ansible/ansible.cfg` (https://github.com/ansible/awx/pull/6038)
- Fixed a bug that broke inventory updates for installs with custom home directories for the awx user (https://github.com/ansible/awx/pull/6152)
- Fixed a bug that broke fact data collection when AWX encounters invalid/unexpected fact data (https://github.com/ansible/awx/issues/5935)

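A small sketch of using that exit code to gate a shell or CI step; the job template ID is a placeholder:

```bash
# With --monitor the CLI streams output and exits non-zero if the job
# fails, so the launch can gate later steps in a script.
awx job_templates launch 42 --monitor || {
    echo "job failed" >&2
    exit 1
}
```
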
## 9.2.0 (Feb 12, 2020)

- Added the ability to configure the convergence behavior of workflow nodes https://github.com/ansible/awx/issues/3054
- AWX now allows for a configurable global limit for fork count (per-job run). The default maximum is 200. https://github.com/ansible/awx/pull/5604
- Added the ability to specify AZURE_PUBLIC_CLOUD (for e.g., Azure Government KeyVault support) for the Azure credential plugin https://github.com/ansible/awx/issues/5138
- Added support for several additional parameters for Satellite dynamic inventory https://github.com/ansible/awx/pull/5598
- Added a new field to jobs for tracking the date/time a job is cancelled https://github.com/ansible/awx/pull/5610
- Made a series of additional optimizations to the callback receiver to further improve stdout write speed for running playbooks https://github.com/ansible/awx/pull/5677 https://github.com/ansible/awx/pull/5739
- Updated AWX to be compatible with Helm 3.x (https://github.com/ansible/awx/pull/5776)
- Optimized AWX's job dependency/scheduling code to drastically improve processing time in scenarios where there are many pending jobs scheduled simultaneously https://github.com/ansible/awx/issues/5154
- Fixed a bug which could cause SCM authentication details (basic auth passwords) to be reported to external loggers in certain failure scenarios (e.g., when a git clone fails and ansible itself prints an error message to stdout) https://github.com/ansible/awx/pull/5812
- Fixed a k8s installer bug that caused installs to fail in certain situations https://github.com/ansible/awx/issues/5574
- Fixed a number of issues that caused analytics gathering and reporting to run more often than necessary https://github.com/ansible/awx/pull/5721
- Fixed a bug in the AWX CLI that prevented JSON-type settings from saving properly https://github.com/ansible/awx/issues/5528
- Improved support for fetching custom virtualenv dependencies when AWX is installed behind a proxy https://github.com/ansible/awx/pull/5805
- Updated the bundled version of openstacksdk to address a known issue https://github.com/ansible/awx/issues/5821
- Updated the bundled vmware_inventory plugin to the latest version to address a bug https://github.com/ansible/awx/pull/5668
- Fixed a bug that could cause inventory updates to fail to properly save their output when run within a workflow https://github.com/ansible/awx/pull/5666
- Removed a number of pre-computed fields from the Host and Group models to improve AWX performance. As part of this change, inventory group UIs throughout the interface no longer display status icons https://github.com/ansible/awx/pull/5448

## 9.1.1 (Jan 14, 2020)

- Fixed a bug that caused database migrations on Kubernetes installs to hang https://github.com/ansible/awx/pull/5579
- Upgraded Python-level app dependencies in the AWX virtual environment https://github.com/ansible/awx/pull/5407
- Running jobs no longer block associated inventory updates https://github.com/ansible/awx/pull/5519
- Fixed an invalid_response SAML error https://github.com/ansible/awx/pull/5577
- Optimized the callback receiver to drastically improve the write speed of stdout for parallel jobs (https://github.com/ansible/awx/pull/5618)

## 9.1.0 (Dec 17, 2019)

- Added a command to generate a new SECRET_KEY and rekey the secrets in the database (see the sketch at the end of this section)
- Removed project update locking when jobs using it are running
- Fixed slow queries for /api/v2/instances and /api/v2/instance_groups when smart inventories are used
- Fixed a partial password disclosure when special characters existed in the RabbitMQ password (CVE-2019-19342)
- Fixed a hang in error handling for source control checkouts
- Fixed an error on subsequent job runs that override the branch of a project on an instance that did not have a prior project checkout
- Fixed an issue where jobs launched in isolated or container groups would incorrectly time out
- Fixed an incorrect link to the instance groups documentation in the user interface
- Fixed editing of inventory on Workflow templates
- Fixed multiple issues with OAuth2 token cleanup system jobs
- Fixed a bug that broke email notifications for workflow approval/deny https://github.com/ansible/awx/issues/5401
- Updated the SAML implementation to automatically log in if authorization already exists
- Updated AngularJS to 1.7.9 for CVE-2019-10768

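A usage sketch for the rekey command above; the command name below is our best recollection of this change, so confirm it against `awx-manage --help` on your install:

```bash
# Generate a fresh SECRET_KEY and re-encrypt the secrets stored in the
# database with it (command name assumed; verify with `awx-manage --help`).
awx-manage regenerate_secret_key
# Afterwards, update SECRET_KEY wherever your deployment stores it and
# restart the AWX services so they pick up the new key.
```
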
## 9.0.1 (Nov 4, 2019)

- Fixed a bug in the installer that broke certain types of k8s installs https://github.com/ansible/awx/issues/5205

## 9.0.0 (Oct 31, 2019)

- Updated AWX images to use centos:8 as the parent image.
- Updated to ansible-runner 1.4.4 to address various bugs.
- Added oc and kubectl to the AWX images to support new container-based execution introduced in 8.0.0.
- Added some optimizations to speed up the deletion of large Inventory Groups.
- Fixed a bug that broke webhook launches for Job Templates that define a survey (https://github.com/ansible/awx/issues/5062).
- Fixed a bug in the CLI which incorrectly parsed launch time arguments for `awx job_templates launch` and `awx workflow_job_templates launch` (https://github.com/ansible/awx/issues/5093).
- Fixed a bug that caused inventory updates using "sourced from a project" to stop working (https://github.com/ansible/awx/issues/4750).
- Fixed a bug that caused Slack notifications to sometimes show the wrong bot avatar (https://github.com/ansible/awx/pull/5125).
- Fixed a bug that prevented the use of digits in AWX's URL settings (https://github.com/ansible/awx/issues/5081).

## 8.0.0 (Oct 21, 2019)

- The Ansible Tower Ansible modules have been migrated to a new official Ansible AWX collection: https://galaxy.ansible.com/awx/AWX

  Please note that this functionality is only supported in Ansible 2.9+

- AWX now supports the ability to launch jobs from external webhooks (GitHub and GitLab integration are supported).
- AWX now supports Container Groups, a new feature that allows you to schedule and run playbooks on single-use kubernetes pods on-demand.
- AWX now supports sending notifications when Workflow steps are approved, denied, or time out.
- AWX now records the user who approved or denied Workflow steps.
- AWX now supports fetching Ansible Collections from private galaxy servers.
- AWX now checks the user's ansible.cfg for paths where roles/collections may live when running project updates.
- AWX now uses PostgreSQL 10 by default.
- AWX now warns more loudly about underlying AMQP connectivity issues (https://github.com/ansible/awx/pull/4857).
- Added a few optimizations to drastically improve dashboard performance for larger AWX installs (installs with several hundred thousand jobs or more).
- Updated to the latest version of Ansible's VMWare inventory script (which adds support for vmware_guest_facts).
- Deprecated /api/v2/inventory_scripts/ (this endpoint - and the Custom Inventory Script feature - will be removed in a future release of AWX).
- Fixed a bug which prevented Organization Admins from removing users from their own Organization (https://github.com/ansible/awx/issues/2979)
- Fixed a bug which sometimes caused cluster nodes to fail to re-join with a cryptic error, "No instance found with the current cluster host id" (https://github.com/ansible/awx/issues/4294)
- Fixed a bug that prevented the use of launch-time passphrases when using credential plugins (https://github.com/ansible/awx/pull/4807)
- Fixed a bug that caused notifications assigned at the Organization level not to take effect for Workflows in that Organization (https://github.com/ansible/awx/issues/4712)
- Fixed a bug which caused a notable amount of CPU overhead on RabbitMQ health checks (https://github.com/ansible/awx/pull/5009)
- Fixed a bug which sometimes caused the `<return>` key to stop functioning in `<textarea>` elements (https://github.com/ansible/awx/issues/4192)
- Fixed a bug which caused request contention when the same OAuth2.0 token was used in multiple simultaneous requests (https://github.com/ansible/awx/issues/4694)
- Fixed a bug related to parsing multiple choice survey options (https://github.com/ansible/awx/issues/4452).
- Fixed a bug that caused single-sign-on icons on the login page to fail to render in certain Windows browsers (https://github.com/ansible/awx/issues/3924)
- Fixed a number of bugs that caused certain OAuth2 settings to not be properly respected, such as REFRESH_TOKEN_EXPIRE_SECONDS.
- Fixed a number of bugs in the AWX CLI, including a bug which sometimes caused long lines of stdout output to be unexpectedly truncated.
- Fixed a number of bugs on the job details UI which sometimes caused auto-scrolling stdout to become stuck.
- Fixed a bug which caused LDAP authentication to fail if the TLD of the server URL contained digits (https://github.com/ansible/awx/issues/3646)
- Fixed a bug which broke HashiCorp Vault integration on older versions of HashiCorp Vault.

## 7.0.0 (Sept 4, 2019)

- AWX now detects and installs Ansible Collections defined in your project (note - this feature only works in Ansible 2.9+) (https://github.com/ansible/awx/issues/2534)
- AWX now includes an official command line client. Keep an eye out for a follow-up email on this mailing list for information on how to install it and try it out.
- Added the ability to provide a specific SCM branch on jobs (https://github.com/ansible/awx/issues/282)
- Added support for Workflow Approval Nodes, a new feature which allows you to add "pause and wait for approval" steps into your workflows (https://github.com/ansible/awx/issues/1206)
- Added the ability to specify a specific HTTP method for webhook notifications (POST vs PUT) (https://github.com/ansible/awx/pull/4124)
- Added the ability to specify a username and password for HTTP Basic Authorization for webhook notifications (https://github.com/ansible/awx/pull/4124)
- Added support for customizing the text content of notifications (https://github.com/ansible/awx/issues/79)
- Added the ability to enable and disable hosts in dynamic inventory (https://github.com/ansible/awx/pull/4420)
- Added the description (if any) to the Job Template list (https://github.com/ansible/awx/issues/4359)
- Added new metrics for instance hostnames and pending jobs to the /api/v2/metrics/ endpoint (https://github.com/ansible/awx/pull/4375)
- Changed AWX's on/off toggle buttons to a non-text based style to simplify internationalization (https://github.com/ansible/awx/pull/4425)
- Events emitted by ansible for adhoc commands are now sent to the external log aggregator (https://github.com/ansible/awx/issues/4545)
- Fixed a bug which allowed a user to create an organization credential in another organization without permissions to that organization (https://github.com/ansible/awx/pull/4483)
- Fixed a bug that caused `extra_vars` on workflows to break when edited (https://github.com/ansible/awx/issues/4293)
- Fixed a slow SQL query that caused performance issues when large numbers of groups exist (https://github.com/ansible/awx/issues/4461)
- Fixed a few minor bugs in survey field validation (https://github.com/ansible/awx/pull/4509) (https://github.com/ansible/awx/pull/4479)
- Fixed a bug that sometimes resulted in orphaned `ansible_runner_pi` directories in `/tmp` after playbook execution (https://github.com/ansible/awx/pull/4409)
- Fixed a bug that caused the `is_system_auditor` flag in LDAP configuration to not work (https://github.com/ansible/awx/pull/4396)
- Fixed a bug which caused schedules to disappear from the UI when toggled off (https://github.com/ansible/awx/pull/4378)
- Fixed a bug that sometimes caused stdout content to contain extraneous blank lines in newer versions of Ansible (https://github.com/ansible/awx/pull/4391)
- Updated to the latest Django security release, 2.2.4 (https://github.com/ansible/awx/pull/4410) (https://www.djangoproject.com/weblog/2019/aug/01/security-releases/)
- Updated the default version of git to a version that includes support for x509 certificates (https://github.com/ansible/awx/issues/4362)
- Removed the deprecated `credential` field from `/api/v2/workflow_job_templates/N/` (as part of the `/api/v1/` removal in prior AWX versions - https://github.com/ansible/awx/pull/4490).

## 6.1.0 (Jul 18, 2019)

- Updated AWX to use Django 2.2.2.
- Updated the provided openstacksdk version to support new functionality (such as Nova scheduler_hints)
- Added the ability to specify a custom cacert for the HashiCorp Vault credential plugin
- Fixed a number of bugs related to path lookups for the HashiCorp Vault credential plugin
- Fixed a bug which prevented signed SSH certificates from working, including the HashiCorp Vault Signed SSH backend
- Fixed a bug which prevented custom logos from displaying on the login page (as a result of a new Content Security Policy in 6.0.0)
- Fixed a bug which broke websocket connectivity in Apple Safari (as a result of a new Content Security Policy in 6.0.0)
- Fixed a bug on the job output page that occasionally caused the "up" and "down" buttons to not load additional output
- Fixed a bug on the job output page that caused quoted task names to display incorrectly

## 6.0.0 (Jul 1, 2019)

- Removed support for "Any" notification templates and their API endpoints, e.g., /api/v2/job_templates/N/notification_templates/any/ (https://github.com/ansible/awx/issues/4022)
- Fixed a bug which prevented credentials from properly being applied to inventory sources (https://github.com/ansible/awx/issues/4059)
- Fixed a bug which could cause the task dispatcher to hang indefinitely when external logging support (e.g., Splunk, Logstash) is enabled (https://github.com/ansible/awx/issues/4181)
- Fixed a bug which caused slow stdout display when running jobs against smart inventories (https://github.com/ansible/awx/issues/3106)
- Fixed a bug that caused SSL verification flags to fail to be respected for LDAP authentication in certain environments (https://github.com/ansible/awx/pull/4190)
- Added a simple Content Security Policy (https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) to restrict access to third-party resources in the browser (https://github.com/ansible/awx/pull/4167)
- Updated ovirt4 library dependencies to work with newer versions of oVirt (https://github.com/ansible/awx/issues/4138)

## 5.0.0 (Jun 21, 2019)

- Bumped Django Rest Framework from 3.7.7 to 3.9.4
- Bumped setuptools / pip dependencies
- Fixed a bug where the Recent Notifications list would not appear
- Added notifications on job start
- Defaulted to Ansible 2.8

CONTRIBUTING.md

````diff
@@ -6,21 +6,21 @@ Have questions about this document or anything not covered here? Come chat with
 
 ## Table of contents
 
-* [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
-* [Setting up your development environment](#setting-up-your-development-environment)
-* [Prerequisites](#prerequisites)
-* [Docker](#docker)
-* [Docker compose](#docker-compose)
-* [Frontend Development](#frontend-development)
-* [Build and Run the Development Environment](#build-and-run-the-development-environment)
-* [Fork and clone the AWX repo](#fork-and-clone-the-awx-repo)
-* [Building API Documentation](#building-api-documentation)
-* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
-* [Purging containers and images](#purging-containers-and-images)
-* [What should I work on?](#what-should-i-work-on)
-* [Submitting Pull Requests](#submitting-pull-requests)
-* [PR Checks run by Zuul](#pr-checks-run-by-zuul)
-* [Reporting Issues](#reporting-issues)
+- [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
+- [Setting up your development environment](#setting-up-your-development-environment)
+- [Prerequisites](#prerequisites)
+- [Docker](#docker)
+- [Docker compose](#docker-compose)
+- [Frontend Development](#frontend-development)
+- [Build and Run the Development Environment](#build-and-run-the-development-environment)
+- [Fork and clone the AWX repo](#fork-and-clone-the-awx-repo)
+- [Building API Documentation](#building-api-documentation)
+- [Accessing the AWX web interface](#accessing-the-awx-web-interface)
+- [Purging containers and images](#purging-containers-and-images)
+- [What should I work on?](#what-should-i-work-on)
+- [Submitting Pull Requests](#submitting-pull-requests)
+- [PR Checks run by Zuul](#pr-checks-run-by-zuul)
+- [Reporting Issues](#reporting-issues)
 
 ## Things to know prior to submitting code
@@ -46,15 +46,15 @@ respectively.
 
 For Linux platforms, refer to the following from Docker:
 
-* **Fedora** - https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
+- **Fedora** - https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
 
-* **CentOS** - https://docs.docker.com/engine/installation/linux/docker-ce/centos/
+- **CentOS** - https://docs.docker.com/engine/installation/linux/docker-ce/centos/
 
-* **Ubuntu** - https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
+- **Ubuntu** - https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
 
-* **Debian** - https://docs.docker.com/engine/installation/linux/docker-ce/debian/
+- **Debian** - https://docs.docker.com/engine/installation/linux/docker-ce/debian/
 
-* **Arch** - https://wiki.archlinux.org/index.php/Docker
+- **Arch** - https://wiki.archlinux.org/index.php/Docker
 
 #### Docker Compose
@@ -66,7 +66,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
 
 #### Frontend Development
 
-See [the ui development documentation](awx/ui_next/CONTRIBUTING.md).
+See [the ui development documentation](awx/ui/CONTRIBUTING.md).
 
 #### Fork and clone the AWX repo
@@ -74,19 +74,19 @@ If you have not done so already, you'll need to fork the AWX repo on GitHub. For
 
 ### Build and Run the Development Environment
 
 See the [README.md](./tools/docker-compose/README.md) for docs on how to build the awx_devel image and run the development environment.
 
 ### Building API Documentation
 
 AWX includes support for building [Swagger/OpenAPI
 documentation](https://swagger.io). To build the documentation locally, run:
 
 ```bash
 (container)/awx_devel$ make swagger
 ```
 
 This will write a file named `swagger.json` that contains the API specification
 in OpenAPI format. A variety of online tools are available for translating
 this data into more consumable formats (such as HTML). http://editor.swagger.io
 is an example of one such service.
@@ -126,15 +126,15 @@ Fixes and Features for AWX will go through the Github pull request process. Subm
 
 Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:
 
-* No issues when running linters/code checkers
-  * Python: black: `(container)/awx_devel$ make black`
-  * Javascript: JsHint: `(container)/awx_devel$ make jshint`
-* No issues from unit tests
-  * Python: py.test: `(container)/awx_devel$ make test`
-  * JavaScript: Jasmine: `(container)/awx_devel$ make ui-test-ci`
-* Write tests for new functionality, update/add tests for bug fixes
-* Make the smallest change possible
-* Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
+- No issues when running linters/code checkers
+  - Python: black: `(container)/awx_devel$ make black`
+  - Javascript: `(container)/awx_devel$ make ui-lint`
+- No issues from unit tests
+  - Python: py.test: `(container)/awx_devel$ make test`
+  - JavaScript: `(container)/awx_devel$ make ui-test`
+- Write tests for new functionality, update/add tests for bug fixes
+- Make the smallest change possible
+- Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
 
 It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
@@ -146,21 +146,24 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 
 All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.
 
 ## PR Checks run by Zuul
 
 Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.
 
 Zuul runs the following checks that must pass:
-1) `tox-awx-api-lint`
-2) `tox-awx-ui-lint`
-3) `tox-awx-api`
-4) `tox-awx-ui`
-5) `tox-awx-swagger`
+
+1. `tox-awx-api-lint`
+2. `tox-awx-ui-lint`
+3. `tox-awx-api`
+4. `tox-awx-ui`
+5. `tox-awx-swagger`
 
 Zuul runs the following checks that are non-voting (can not pass but serve to inform PR reviewers):
-1) `tox-awx-detect-schema-change`
-This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
-Reviewers should inspect the `job-output.txt.gz` related to the check if their is a failure (grep for `diff -u -b` to find beginning of diff).
-If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
-If not, the schema changes should be fixed, but this decision must be enforced by reviewers.
+
+1. `tox-awx-detect-schema-change`
+This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
+Reviewers should inspect the `job-output.txt.gz` related to the check if their is a failure (grep for `diff -u -b` to find beginning of diff).
+If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
+If not, the schema changes should be fixed, but this decision must be enforced by reviewers.
 
 ## Reporting Issues
````

@@ -4,8 +4,8 @@ recursive-include awx *.mo
 recursive-include awx/static *
 recursive-include awx/templates *.html
 recursive-include awx/api/templates *.md *.html
-recursive-include awx/ui_next/build *.html
-recursive-include awx/ui_next/build *
+recursive-include awx/ui/build *.html
+recursive-include awx/ui/build *
 recursive-include awx/playbooks *.yml
 recursive-include awx/lib/site-packages *
 recursive-include awx/plugins *.ps1
Makefile (160 changes)
@@ -1,26 +1,19 @@
 PYTHON ?= python3.8
 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
-SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no
-PACKER ?= packer
-PACKER_BUILD_OPTS ?= -var 'official=$(OFFICIAL)' -var 'aw_repo_url=$(AW_REPO_URL)'
 NODE ?= node
 NPM_BIN ?= npm
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
-DEPS_SCRIPT ?= packaging/bundle/deps.py
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-IMAGE_REPOSITORY_AUTH ?=
-IMAGE_REPOSITORY_BASE ?= https://gcr.io
 VERSION := $(shell cat VERSION)
 
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
 COMPOSE_HOST ?= $(shell hostname)
+MAIN_NODE_TYPE ?= hybrid
 
-VENV_BASE ?= /var/lib/awx/venv/
-SCL_PREFIX ?=
-CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db
+VENV_BASE ?= /var/lib/awx/venv
 
 DEV_DOCKER_TAG_BASE ?= quay.io/awx
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
@@ -32,30 +25,13 @@ SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # to install the actual requirements
 VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0 wheel==0.36.2
 
-# Determine appropriate shasum command
-UNAME_S := $(shell uname -s)
-ifeq ($(UNAME_S),Linux)
-SHASUM_BIN ?= sha256sum
-endif
-ifeq ($(UNAME_S),Darwin)
-SHASUM_BIN ?= shasum -a 256
-endif
-
-# Get the branch information from git
-GIT_DATE := $(shell git log -n 1 --format="%ai")
-DATE := $(shell date -u +%Y%m%d%H%M)
-
 NAME ?= awx
-GIT_REMOTE_URL = $(shell git config --get remote.origin.url)
 
 # TAR build parameters
 SDIST_TAR_NAME=$(NAME)-$(VERSION)
-WHEEL_NAME=$(NAME)-$(VERSION)
 
 SDIST_COMMAND ?= sdist
-WHEEL_COMMAND ?= bdist_wheel
 SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
-WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl
 
 I18N_FLAG_FILE = .i18n_built
 
@@ -83,7 +59,7 @@ clean-schema:
 
 clean-languages:
 	rm -f $(I18N_FLAG_FILE)
-	find . -type f -regex ".*\.mo$$" -delete
+	find ./awx/locale/ -type f -regex ".*\.mo$" -delete
 
 # Remove temporary build files, compiled Python files.
 clean: clean-ui clean-api clean-awxkit clean-dist
@@ -172,8 +148,17 @@ init:
 	if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
-	$(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;
+	$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST) --node_type=$(MAIN_NODE_TYPE); \
+	$(MANAGEMENT_COMMAND) register_queue --queuename=controlplane --instance_percent=100;\
+	$(MANAGEMENT_COMMAND) register_queue --queuename=default --instance_percent=100;
+	if [ ! -f /etc/receptor/certs/awx.key ]; then \
+	rm -f /etc/receptor/certs/*; \
+	receptor --cert-init commonname="AWX Test CA" bits=2048 outcert=/etc/receptor/certs/ca.crt outkey=/etc/receptor/certs/ca.key; \
+	for node in $(RECEPTOR_MUTUAL_TLS); do \
+	receptor --cert-makereq bits=2048 commonname="$$node test cert" dnsname=$$node nodeid=$$node outreq=/etc/receptor/certs/$$node.csr outkey=/etc/receptor/certs/$$node.key; \
+	receptor --cert-signreq req=/etc/receptor/certs/$$node.csr cacert=/etc/receptor/certs/ca.crt cakey=/etc/receptor/certs/ca.key outcert=/etc/receptor/certs/$$node.crt verify=yes; \
+	done; \
+	fi
 
 # Refresh development environment after pulling new code.
 refresh: clean requirements_dev version_file develop migrate
@@ -288,6 +273,11 @@ swagger: reports
 
 check: black
 
+api-lint:
+	BLACK_ARGS="--check" make black
+	flake8 awx
+	yamllint -s .
+
 awx-link:
 	[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/setup.py egg_info_dev
 	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
@@ -315,7 +305,7 @@ test_collection:
 	if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi && \
-	pip install ansible && \
+	pip install ansible-core && \
 	py.test $(COLLECTION_TEST_DIRS) -v
 # The python path needs to be modified so that the tests can find Ansible within the container
 # First we will use anything explicitly set as PYTHONPATH
@@ -378,45 +368,49 @@ bulk_data:
 # UI TASKS
 # --------------------------------------
 
-UI_BUILD_FLAG_FILE = awx/ui_next/.ui-built
+UI_BUILD_FLAG_FILE = awx/ui/.ui-built
 
 clean-ui:
 	rm -rf node_modules
-	rm -rf awx/ui_next/node_modules
-	rm -rf awx/ui_next/build
-	rm -rf awx/ui_next/src/locales/_build
+	rm -rf awx/ui/node_modules
+	rm -rf awx/ui/build
+	rm -rf awx/ui/src/locales/_build
 	rm -rf $(UI_BUILD_FLAG_FILE)
 
-awx/ui_next/node_modules:
-	NODE_OPTIONS=--max-old-space-size=4096 $(NPM_BIN) --prefix awx/ui_next --loglevel warn ci
+awx/ui/node_modules:
	NODE_OPTIONS=--max-old-space-size=4096 $(NPM_BIN) --prefix awx/ui --loglevel warn ci
 
 $(UI_BUILD_FLAG_FILE):
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run build
+	$(PYTHON) tools/scripts/compilemessages.py
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run build
 	mkdir -p awx/public/static/css
 	mkdir -p awx/public/static/js
 	mkdir -p awx/public/static/media
-	cp -r awx/ui_next/build/static/css/* awx/public/static/css
-	cp -r awx/ui_next/build/static/js/* awx/public/static/js
-	cp -r awx/ui_next/build/static/media/* awx/public/static/media
+	cp -r awx/ui/build/static/css/* awx/public/static/css
+	cp -r awx/ui/build/static/js/* awx/public/static/js
+	cp -r awx/ui/build/static/media/* awx/public/static/media
 	touch $@
 
-ui-release: awx/ui_next/node_modules $(UI_BUILD_FLAG_FILE)
+ui-release: awx/ui/node_modules $(UI_BUILD_FLAG_FILE)
 
-ui-devel: awx/ui_next/node_modules
+ui-devel: awx/ui/node_modules
 	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
 
-ui-devel-instrumented: awx/ui_next/node_modules
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run start-instrumented
+ui-devel-instrumented: awx/ui/node_modules
	$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
 
-ui-devel-test: awx/ui_next/node_modules
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run start
+ui-devel-test: awx/ui/node_modules
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run start
 
-ui-zuul-lint-and-test:
-	$(NPM_BIN) --prefix awx/ui_next install
-	$(NPM_BIN) run --prefix awx/ui_next lint
-	$(NPM_BIN) run --prefix awx/ui_next prettier-check
-	$(NPM_BIN) run --prefix awx/ui_next test -- --coverage --watchAll=false
+ui-lint:
+	$(NPM_BIN) --prefix awx/ui install
+	$(NPM_BIN) run --prefix awx/ui lint
+	$(NPM_BIN) run --prefix awx/ui prettier-check
+
+ui-test:
+	$(NPM_BIN) --prefix awx/ui install
+	$(NPM_BIN) run --prefix awx/ui test -- --coverage --maxWorkers=4 --watchAll=false
 
 # Build a pip-installable package into dist/ with a timestamped version number.
@@ -430,30 +424,12 @@ release_build:
 dist/$(SDIST_TAR_FILE): ui-release VERSION
 	$(PYTHON) setup.py $(SDIST_COMMAND)
 
-dist/$(WHEEL_FILE): ui-release
-	$(PYTHON) setup.py $(WHEEL_COMMAND)
 
 sdist: dist/$(SDIST_TAR_FILE)
 	@echo "#############################################"
 	@echo "Artifacts:"
 	@echo dist/$(SDIST_TAR_FILE)
 	@echo "#############################################"
 
-wheel: dist/$(WHEEL_FILE)
-	@echo "#############################################"
-	@echo "Artifacts:"
-	@echo dist/$(WHEEL_FILE)
-	@echo "#############################################"
-
-# Build setup bundle tarball
-setup-bundle-build:
-	mkdir -p $@
-
-docker-auth:
-	@if [ "$(IMAGE_REPOSITORY_AUTH)" ]; then \
-	echo "$(IMAGE_REPOSITORY_AUTH)" | docker login -u oauth2accesstoken --password-stdin $(IMAGE_REPOSITORY_BASE); \
-	fi;
 
 # This directory is bind-mounted inside of the development container and
 # needs to be pre-created for permissions to be set correctly. Otherwise,
 # Docker will create this directory as root.
@@ -461,20 +437,30 @@ awx/projects:
 	@mkdir -p $@
 
 COMPOSE_UP_OPTS ?=
-CLUSTER_NODE_COUNT ?= 1
+COMPOSE_OPTS ?=
+CONTROL_PLANE_NODE_COUNT ?= 1
+EXECUTION_NODE_COUNT ?= 2
+MINIKUBE_CONTAINER_GROUP ?= false
 
 docker-compose-sources: .git/hooks/pre-commit
+	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
+	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+	fi;
+
 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 	-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 	-e awx_image_tag=$(COMPOSE_TAG) \
-	-e cluster_node_count=$(CLUSTER_NODE_COUNT)
+	-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
+	-e execution_node_count=$(EXECUTION_NODE_COUNT) \
+	-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP)
 
 docker-compose: docker-auth awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_UP_OPTS) up
+	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
 docker-compose-credential-plugins: docker-auth awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1
+	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
 
 docker-compose-test: docker-auth awx/projects docker-compose-sources
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
@@ -493,6 +479,12 @@ detect-schema-change: genschema
 docker-compose-clean: awx/projects
 	docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf
 
+docker-compose-container-group-clean:
+	@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
+	tools/docker-compose-minikube/_sources/minikube delete; \
+	fi
+	rm -rf tools/docker-compose-minikube/_sources/
+
 # Base development image build
 docker-compose-build:
 	ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True
@@ -501,10 +493,12 @@ docker-compose-build:
 	--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
 
 docker-clean:
-	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq),docker stop $(container_id); docker rm -f $(container_id);)
-	docker images | grep "awx_devel" | awk '{print $$1 ":" $$2}' | xargs docker rmi
+	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
+	if [ $(shell docker images | grep "awx_devel") ]; then \
+	docker images | grep "awx_devel" | awk '{print $$3}' | xargs docker rmi --force; \
+	fi
 
-docker-clean-volumes: docker-compose-clean
+docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
 	docker volume rm tools_awx_db
 
 docker-refresh: docker-clean docker-compose
@@ -519,6 +513,9 @@ docker-compose-cluster-elk: docker-auth awx/projects docker-compose-sources
 prometheus:
 	docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml
 
+docker-compose-container-group:
+	MINIKUBE_CONTAINER_GROUP=true make docker-compose
+
 clean-elk:
 	docker stop tools_kibana_1
 	docker stop tools_logstash_1
@@ -551,10 +548,13 @@ awx-kube-dev-build: Dockerfile.kube-dev
 # Translation TASKS
 # --------------------------------------
 
-# generate UI .pot
+# generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings
-	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
+
+# generate UI .po files for each locale (will update translated strings for `en`)
+po: $(UI_BUILD_FLAG_FILE)
+	$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
 
 # generate API django .pot .po
 LANG = "en-us"
@@ -1,5 +1,5 @@
-[](https://ansible.softwarefactory-project.io/zuul/status) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
-[](irc.libera.chat - #ansible-awx)
+[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
+[](https://libera.chat)
 
 <img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
 
@@ -20,7 +20,7 @@ Contributing
 - All code submissions are made through pull requests against the `devel` branch.
 - All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
 - Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
-- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on webchat.freenode.net and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.
+- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on web.libera.chat and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.
 
 Reporting Issues
 ----------------
@@ -37,5 +37,5 @@ Get Involved
 
 We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
 
-- Join the `#ansible-awx` channel on webchat.freenode.net
+- Join the `#ansible-awx` channel on irc.libera.chat
 - Join the [mailing list](https://groups.google.com/forum/#!forum/awx-project)
@@ -34,6 +34,7 @@ else:
     from django.db.backends.base import schema
     from django.db.models import indexes
     from django.db.backends.utils import names_digest
+    from django.db import connection
 
 
 if HAS_DJANGO is True:
@@ -149,6 +150,12 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line
 
+    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
+    if not MODE == 'development':
+        if (connection.pg_version // 10000) < 12:
+            sys.stderr.write("Postgres version 12 is required\n")
+            sys.exit(1)
+
     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
         sys.stdout.write('%s\n' % __version__)
     # If running as a user without permission to read settings, display an
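A side note on the version arithmetic in the hunk above: Django/psycopg2 expose PostgreSQL's `server_version_num`-style integer, and floor-dividing it by 10000 yields the major version. A minimal standalone sketch (illustrative values, not AWX code):

```python
# PostgreSQL 10+ encodes its version as major * 10000 + minor,
# e.g. 120007 -> 12.7. Floor division by 10000 recovers the major
# version that the startup check compares against the minimum of 12.
REQUIRED_MAJOR = 12

for raw in (100017, 110012, 120007, 130003):
    major = raw // 10000
    print(f"{raw} -> major {major}, meets requirement: {major >= REQUIRED_MAJOR}")
```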
@@ -133,7 +133,7 @@ class FieldLookupBackend(BaseFilterBackend):
     Filter using field lookups provided via query string parameters.
     """
 
-    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
+    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')
 
     SUPPORTED_LOOKUPS = (
         'exact',
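Adding `limit` to `RESERVED_NAMES` keeps the new pagination parameter from being parsed as a field lookup. A tiny framework-free sketch of that skip logic (the helper name is made up for illustration):

```python
RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search',
                  'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')

def extract_field_lookups(query_params):
    # Reserved parameters steer pagination/ordering; everything else is
    # treated as a field lookup such as name__icontains=demo.
    return {k: v for k, v in query_params.items() if k not in RESERVED_NAMES}

print(extract_field_lookups({'name__icontains': 'demo', 'limit': '5', 'page': '2'}))
# -> {'name__icontains': 'demo'}
```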
@@ -39,6 +39,7 @@ from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credenti
 from awx.main.access import access_registry
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.db import get_all_field_names
+from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
 from awx.api.versioning import URLPathVersioning
@@ -184,9 +185,6 @@ class APIView(views.APIView):
         """
         Log warning for 400 requests. Add header with elapsed time.
         """
-        from awx.main.utils import get_licenser
-        from awx.main.utils.licensing import OpenLicense
-
         #
         # If the URL was rewritten, and we get a 404, we should entirely
         # replace the view in the request context with an ApiErrorView()
@@ -219,14 +217,14 @@ class APIView(views.APIView):
         if hasattr(self, '__init_request_error__'):
             response = self.handle_exception(self.__init_request_error__)
             if response.status_code == 401:
-                response.data['detail'] += ' To establish a login session, visit /api/login/.'
+                response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
                 logger.info(status_msg)
             else:
                 logger.warning(status_msg)
         response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
         time_started = getattr(self, 'time_started', None)
         response['X-API-Product-Version'] = get_awx_version()
-        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'
+        response['X-API-Product-Name'] = server_product_name()
 
         response['X-API-Node'] = settings.CLUSTER_HOST_ID
         if time_started:
@@ -24,7 +24,7 @@ from rest_framework.request import clone_request
 from awx.api.fields import ChoiceNullField
 from awx.main.fields import JSONField, ImplicitRoleField
 from awx.main.models import NotificationTemplate
-from awx.main.tasks import AWXReceptorJob
+from awx.main.utils.execution_environments import get_default_pod_spec
 
 # Polymorphic
 from polymorphic.models import PolymorphicModel
@@ -211,7 +211,7 @@ class Metadata(metadata.SimpleMetadata):
                 continue
 
             if field == "pod_spec_override":
-                meta['default'] = AWXReceptorJob().pod_definition
+                meta['default'] = get_default_pod_spec()
 
             # Add type choices if available from the serializer.
             if field == 'type' and hasattr(serializer, 'get_type_choices'):
@@ -1,12 +1,16 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.
 
+from collections import OrderedDict
+
 # Django REST Framework
 from django.conf import settings
 from django.core.paginator import Paginator as DjangoPaginator
 from rest_framework import pagination
 from rest_framework.response import Response
 from rest_framework.utils.urls import replace_query_param
+from rest_framework.settings import api_settings
+from django.utils.translation import gettext_lazy as _
 
 
 class DisabledPaginator(DjangoPaginator):
@@ -65,3 +69,65 @@ class Pagination(pagination.PageNumberPagination):
         if self.count_disabled:
             return Response({'results': data})
         return super(Pagination, self).get_paginated_response(data)
+
+
+class LimitPagination(pagination.BasePagination):
+    default_limit = api_settings.PAGE_SIZE
+    limit_query_param = 'limit'
+    limit_query_description = _('Number of results to return per page.')
+    max_page_size = settings.MAX_PAGE_SIZE
+
+    def paginate_queryset(self, queryset, request, view=None):
+        self.limit = self.get_limit(request)
+        self.request = request
+
+        return list(queryset[0 : self.limit])
+
+    def get_paginated_response(self, data):
+        return Response(OrderedDict([('results', data)]))
+
+    def get_paginated_response_schema(self, schema):
+        return {
+            'type': 'object',
+            'properties': {
+                'results': schema,
+            },
+        }
+
+    def get_limit(self, request):
+        try:
+            return pagination._positive_int(request.query_params[self.limit_query_param], strict=True)
+        except (KeyError, ValueError):
+            pass
+
+        return self.default_limit
+
+
+class UnifiedJobEventPagination(Pagination):
+    """
+    By default, use Pagination for all operations.
+    If `limit` query parameter specified use LimitPagination
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.use_limit_paginator = False
+        self.limit_pagination = LimitPagination()
+        return super().__init__(*args, **kwargs)
+
+    def paginate_queryset(self, queryset, request, view=None):
+        if 'limit' in request.query_params:
+            self.use_limit_paginator = True
+
+        if self.use_limit_paginator:
+            return self.limit_pagination.paginate_queryset(queryset, request, view=view)
+        return super().paginate_queryset(queryset, request, view=view)
+
+    def get_paginated_response(self, data):
+        if self.use_limit_paginator:
+            return self.limit_pagination.get_paginated_response(data)
+        return super().get_paginated_response(data)
+
+    def get_paginated_response_schema(self, schema):
+        if self.use_limit_paginator:
+            return self.limit_pagination.get_paginated_response_schema(schema)
+        return super().get_paginated_response_schema(schema)
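The `LimitPagination` added above trades page links for a single slice: no count query, no next/previous URLs, just the first `limit` rows. A minimal framework-free sketch of the same behavior (the function name and default are illustrative, not AWX's API):

```python
def paginate_with_limit(items, query_params, default_limit=25):
    # Mirror of get_limit(): take a strictly positive integer from the
    # `limit` query parameter, else fall back to the default.
    try:
        limit = int(query_params['limit'])
        if limit <= 0:
            raise ValueError
    except (KeyError, ValueError):
        limit = default_limit
    # Mirror of paginate_queryset(): slice instead of paging, and return
    # a bare results payload with no count or next/previous links.
    return {'results': list(items[:limit])}

print(paginate_with_limit(range(100), {'limit': '3'}))    # {'results': [0, 1, 2]}
print(paginate_with_limit(range(100), {'limit': 'bad'}))  # falls back to 25 results
```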
@@ -4,6 +4,8 @@
 # Python
 import logging
 
+from django.conf import settings
+
 # Django REST Framework
 from rest_framework.exceptions import MethodNotAllowed, PermissionDenied
 from rest_framework import permissions
@@ -23,7 +25,7 @@ __all__ = [
     'ProjectUpdatePermission',
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
-    'IsSuperUser',
+    'IsSystemAdminOrAuditor',
     'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]
@@ -234,18 +236,23 @@ class UserPermission(ModelAccessPermission):
         raise PermissionDenied()
 
 
-class IsSuperUser(permissions.BasePermission):
+class IsSystemAdminOrAuditor(permissions.BasePermission):
     """
-    Allows access only to admin users.
+    Allows write access only to system admin users.
+    Allows read access only to system auditor users.
     """
 
     def has_permission(self, request, view):
-        return request.user and request.user.is_superuser
+        if not request.user:
+            return False
+        if request.method == 'GET':
+            return request.user.is_superuser or request.user.is_system_auditor
+        return request.user.is_superuser
 
 
 class InstanceGroupTowerPermission(ModelAccessPermission):
     def has_object_permission(self, request, view, obj):
-        if request.method == 'DELETE' and obj.name == "tower":
+        if request.method == 'DELETE' and obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
             return False
         return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj)
 
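The renamed permission class above splits access by HTTP method: superusers keep full access, while system auditors gain read-only access. The decision table, as a framework-free sketch (names illustrative):

```python
def allowed(is_superuser, is_system_auditor, method):
    # GET (read) is open to admins and auditors; any other method
    # (write) remains superuser-only, matching the diff above.
    if method == 'GET':
        return is_superuser or is_system_auditor
    return is_superuser

assert allowed(True, False, 'POST')        # admin may write
assert allowed(False, True, 'GET')         # auditor may read
assert not allowed(False, True, 'POST')    # auditor may not write
assert not allowed(False, False, 'GET')    # everyone else is denied
```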
|
|||||||
@@ -144,7 +144,6 @@ SUMMARIZABLE_FK_FIELDS = {
|
|||||||
'inventory_sources_with_failures',
|
'inventory_sources_with_failures',
|
||||||
'organization_id',
|
'organization_id',
|
||||||
'kind',
|
'kind',
|
||||||
'insights_credential_id',
|
|
||||||
),
|
),
|
||||||
'host': DEFAULT_SUMMARY_FIELDS,
|
'host': DEFAULT_SUMMARY_FIELDS,
|
||||||
'group': DEFAULT_SUMMARY_FIELDS,
|
'group': DEFAULT_SUMMARY_FIELDS,
|
||||||
@@ -171,7 +170,6 @@ SUMMARIZABLE_FK_FIELDS = {
|
|||||||
'role': ('id', 'role_field'),
|
'role': ('id', 'role_field'),
|
||||||
'notification_template': DEFAULT_SUMMARY_FIELDS,
|
'notification_template': DEFAULT_SUMMARY_FIELDS,
|
||||||
'instance_group': ('id', 'name', 'is_container_group'),
|
'instance_group': ('id', 'name', 'is_container_group'),
|
||||||
'insights_credential': DEFAULT_SUMMARY_FIELDS,
|
|
||||||
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||||
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||||
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||||
@@ -724,6 +722,20 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
|
|||||||
else:
|
else:
|
||||||
return super(UnifiedJobTemplateSerializer, self).to_representation(obj)
|
return super(UnifiedJobTemplateSerializer, self).to_representation(obj)
|
||||||
|
|
||||||
|
def get_summary_fields(self, obj):
|
||||||
|
summary_fields = super().get_summary_fields(obj)
|
||||||
|
|
||||||
|
if self.is_detail_view:
|
||||||
|
resolved_ee = obj.resolve_execution_environment()
|
||||||
|
if resolved_ee is not None:
|
||||||
|
summary_fields['resolved_environment'] = {
|
||||||
|
field: getattr(resolved_ee, field, None)
|
||||||
|
for field in SUMMARIZABLE_FK_FIELDS['execution_environment']
|
||||||
|
if getattr(resolved_ee, field, None) is not None
|
||||||
|
}
|
||||||
|
|
||||||
|
return summary_fields
|
||||||
|
|
||||||
|
|
||||||
class UnifiedJobSerializer(BaseSerializer):
|
class UnifiedJobSerializer(BaseSerializer):
|
||||||
show_capabilities = ['start', 'delete']
|
show_capabilities = ['start', 'delete']
|
||||||
@@ -754,6 +766,7 @@ class UnifiedJobSerializer(BaseSerializer):
|
|||||||
'result_traceback',
|
'result_traceback',
|
||||||
'event_processing_finished',
|
'event_processing_finished',
|
||||||
'launched_by',
|
'launched_by',
|
||||||
|
'work_unit_id',
|
||||||
)
|
)
|
||||||
|
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
@@ -935,7 +948,6 @@ class UserSerializer(BaseSerializer):
|
|||||||
'*',
|
'*',
|
||||||
'-name',
|
'-name',
|
||||||
'-description',
|
'-description',
|
||||||
'-modified',
|
|
||||||
'username',
|
'username',
|
||||||
'first_name',
|
'first_name',
|
||||||
'last_name',
|
'last_name',
|
||||||
@@ -1396,11 +1408,11 @@ class ProjectOptionsSerializer(BaseSerializer):
|
|||||||
|
|
||||||
class ExecutionEnvironmentSerializer(BaseSerializer):
|
class ExecutionEnvironmentSerializer(BaseSerializer):
|
||||||
show_capabilities = ['edit', 'delete', 'copy']
|
show_capabilities = ['edit', 'delete', 'copy']
|
||||||
managed_by_tower = serializers.ReadOnlyField()
|
managed = serializers.ReadOnlyField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = ExecutionEnvironment
|
model = ExecutionEnvironment
|
||||||
fields = ('*', 'organization', 'image', 'managed_by_tower', 'credential', 'pull')
|
fields = ('*', 'organization', 'image', 'managed', 'credential', 'pull')
|
||||||
|
|
||||||
def get_related(self, obj):
|
def get_related(self, obj):
|
||||||
res = super(ExecutionEnvironmentSerializer, self).get_related(obj)
|
res = super(ExecutionEnvironmentSerializer, self).get_related(obj)
|
||||||
@@ -1646,7 +1658,6 @@ class InventorySerializer(BaseSerializerWithVariables):
|
|||||||
'has_inventory_sources',
|
'has_inventory_sources',
|
||||||
'total_inventory_sources',
|
'total_inventory_sources',
|
||||||
'inventory_sources_with_failures',
|
'inventory_sources_with_failures',
|
||||||
'insights_credential',
|
|
||||||
'pending_deletion',
|
'pending_deletion',
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1671,8 +1682,6 @@ class InventorySerializer(BaseSerializerWithVariables):
|
|||||||
copy=self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
|
copy=self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
if obj.insights_credential:
|
|
||||||
res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
|
|
||||||
if obj.organization:
|
if obj.organization:
|
||||||
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
||||||
return res
|
return res
|
||||||
@@ -1740,10 +1749,9 @@ class HostSerializer(BaseSerializerWithVariables):
|
|||||||
'has_inventory_sources',
|
'has_inventory_sources',
|
||||||
'last_job',
|
'last_job',
|
||||||
'last_job_host_summary',
|
'last_job_host_summary',
|
||||||
'insights_system_id',
|
|
||||||
'ansible_facts_modified',
|
'ansible_facts_modified',
|
||||||
)
|
)
|
||||||
read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id', 'ansible_facts_modified')
|
read_only_fields = ('last_job', 'last_job_host_summary', 'ansible_facts_modified')
|
||||||
|
|
||||||
def build_relational_field(self, field_name, relation_info):
|
def build_relational_field(self, field_name, relation_info):
|
||||||
field_class, field_kwargs = super(HostSerializer, self).build_relational_field(field_name, relation_info)
|
field_class, field_kwargs = super(HostSerializer, self).build_relational_field(field_name, relation_info)
|
||||||
@@ -1767,7 +1775,6 @@ class HostSerializer(BaseSerializerWithVariables):
|
|||||||
smart_inventories=self.reverse('api:host_smart_inventories_list', kwargs={'pk': obj.pk}),
|
smart_inventories=self.reverse('api:host_smart_inventories_list', kwargs={'pk': obj.pk}),
|
||||||
ad_hoc_commands=self.reverse('api:host_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
|
ad_hoc_commands=self.reverse('api:host_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
|
||||||
ad_hoc_command_events=self.reverse('api:host_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
|
ad_hoc_command_events=self.reverse('api:host_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
|
||||||
insights=self.reverse('api:host_insights', kwargs={'pk': obj.pk}),
|
|
||||||
ansible_facts=self.reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.pk}),
|
ansible_facts=self.reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.pk}),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -2473,14 +2480,14 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
|
|
||||||
class CredentialTypeSerializer(BaseSerializer):
|
class CredentialTypeSerializer(BaseSerializer):
|
||||||
show_capabilities = ['edit', 'delete']
|
show_capabilities = ['edit', 'delete']
|
||||||
managed_by_tower = serializers.ReadOnlyField()
|
managed = serializers.ReadOnlyField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = CredentialType
|
model = CredentialType
|
||||||
fields = ('*', 'kind', 'namespace', 'name', 'managed_by_tower', 'inputs', 'injectors')
|
fields = ('*', 'kind', 'namespace', 'name', 'managed', 'inputs', 'injectors')
|
||||||
|
|
||||||
def validate(self, attrs):
|
def validate(self, attrs):
|
||||||
if self.instance and self.instance.managed_by_tower:
|
if self.instance and self.instance.managed:
|
||||||
raise PermissionDenied(detail=_("Modifications not allowed for managed credential types"))
|
raise PermissionDenied(detail=_("Modifications not allowed for managed credential types"))
|
||||||
|
|
||||||
old_inputs = {}
|
old_inputs = {}
|
||||||
@@ -2512,8 +2519,8 @@ class CredentialTypeSerializer(BaseSerializer):
|
|||||||
def to_representation(self, data):
|
def to_representation(self, data):
|
||||||
value = super(CredentialTypeSerializer, self).to_representation(data)
|
value = super(CredentialTypeSerializer, self).to_representation(data)
|
||||||
|
|
||||||
# translate labels and help_text for credential fields "managed by Tower"
|
# translate labels and help_text for credential fields "managed"
|
||||||
if value.get('managed_by_tower'):
|
if value.get('managed'):
|
||||||
value['name'] = _(value['name'])
|
value['name'] = _(value['name'])
|
||||||
for field in value.get('inputs', {}).get('fields', []):
|
for field in value.get('inputs', {}).get('fields', []):
|
||||||
field['label'] = _(field['label'])
|
field['label'] = _(field['label'])
|
||||||
@@ -2532,11 +2539,11 @@ class CredentialTypeSerializer(BaseSerializer):
|
|||||||
class CredentialSerializer(BaseSerializer):
|
class CredentialSerializer(BaseSerializer):
|
||||||
show_capabilities = ['edit', 'delete', 'copy', 'use']
|
show_capabilities = ['edit', 'delete', 'copy', 'use']
|
||||||
capabilities_prefetch = ['admin', 'use']
|
capabilities_prefetch = ['admin', 'use']
|
||||||
managed_by_tower = serializers.ReadOnlyField()
|
managed = serializers.ReadOnlyField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Credential
|
model = Credential
|
||||||
fields = ('*', 'organization', 'credential_type', 'managed_by_tower', 'inputs', 'kind', 'cloud', 'kubernetes')
|
fields = ('*', 'organization', 'credential_type', 'managed', 'inputs', 'kind', 'cloud', 'kubernetes')
|
||||||
extra_kwargs = {'credential_type': {'label': _('Credential Type')}}
|
extra_kwargs = {'credential_type': {'label': _('Credential Type')}}
|
||||||
|
|
||||||
def to_representation(self, data):
|
def to_representation(self, data):
|
||||||
@@ -2603,7 +2610,7 @@ class CredentialSerializer(BaseSerializer):
|
|||||||
return summary_dict
|
return summary_dict
|
||||||
|
|
||||||
def validate(self, attrs):
|
def validate(self, attrs):
|
||||||
if self.instance and self.instance.managed_by_tower:
|
if self.instance and self.instance.managed:
|
||||||
raise PermissionDenied(detail=_("Modifications not allowed for managed credentials"))
|
raise PermissionDenied(detail=_("Modifications not allowed for managed credentials"))
|
||||||
return super(CredentialSerializer, self).validate(attrs)
|
return super(CredentialSerializer, self).validate(attrs)
|
||||||
|
|
||||||
@@ -2615,7 +2622,7 @@ class CredentialSerializer(BaseSerializer):
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
def validate_organization(self, org):
|
def validate_organization(self, org):
|
||||||
if self.instance and self.instance.credential_type.kind == 'galaxy' and org is None:
|
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
|
||||||
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
|
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
|
||||||
return org
|
return org
|
||||||
|
|
||||||
@@ -2623,7 +2630,6 @@ class CredentialSerializer(BaseSerializer):
|
|||||||
if self.instance and credential_type.pk != self.instance.credential_type.pk:
|
if self.instance and credential_type.pk != self.instance.credential_type.pk:
|
||||||
for related_objects in (
|
for related_objects in (
|
||||||
'ad_hoc_commands',
|
'ad_hoc_commands',
|
||||||
'insights_inventories',
|
|
||||||
'unifiedjobs',
|
'unifiedjobs',
|
||||||
'unifiedjobtemplates',
|
'unifiedjobtemplates',
|
||||||
'projects',
|
'projects',
|
||||||
@@ -3031,7 +3037,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
|
|||||||
res = super(JobSerializer, self).get_related(obj)
|
res = super(JobSerializer, self).get_related(obj)
|
||||||
res.update(
|
res.update(
|
||||||
dict(
|
dict(
|
||||||
job_events=self.reverse('api:job_job_events_list', kwargs={'pk': obj.pk}),
|
job_events=self.reverse('api:job_job_events_list', kwargs={'pk': obj.pk}), # TODO: consider adding job_created
|
||||||
job_host_summaries=self.reverse('api:job_job_host_summaries_list', kwargs={'pk': obj.pk}),
|
job_host_summaries=self.reverse('api:job_job_host_summaries_list', kwargs={'pk': obj.pk}),
|
||||||
activity_stream=self.reverse('api:job_activity_stream_list', kwargs={'pk': obj.pk}),
|
activity_stream=self.reverse('api:job_activity_stream_list', kwargs={'pk': obj.pk}),
|
||||||
notifications=self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
|
notifications=self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
|
||||||
@@ -3098,8 +3104,8 @@ class JobDetailSerializer(JobSerializer):
|
|||||||
fields = ('*', 'host_status_counts', 'playbook_counts', 'custom_virtualenv')
|
fields = ('*', 'host_status_counts', 'playbook_counts', 'custom_virtualenv')
|
||||||
|
|
||||||
def get_playbook_counts(self, obj):
|
def get_playbook_counts(self, obj):
|
||||||
task_count = obj.job_events.filter(event='playbook_on_task_start').count()
|
task_count = obj.get_event_queryset().filter(event='playbook_on_task_start').count()
|
||||||
play_count = obj.job_events.filter(event='playbook_on_play_start').count()
|
play_count = obj.get_event_queryset().filter(event='playbook_on_play_start').count()
|
||||||
|
|
||||||
data = {'play_count': play_count, 'task_count': task_count}
|
data = {'play_count': play_count, 'task_count': task_count}
|
||||||
|
|
||||||
@@ -3107,7 +3113,7 @@ class JobDetailSerializer(JobSerializer):
|
|||||||
|
|
||||||
def get_host_status_counts(self, obj):
|
def get_host_status_counts(self, obj):
|
||||||
try:
|
try:
|
||||||
counts = obj.job_events.only('event_data').get(event='playbook_on_stats').get_host_status_counts()
|
counts = obj.get_event_queryset().only('event_data').get(event='playbook_on_stats').get_host_status_counts()
|
||||||
except JobEvent.DoesNotExist:
|
except JobEvent.DoesNotExist:
|
||||||
counts = {}
|
counts = {}
|
||||||
|
|
||||||
@@ -3414,6 +3420,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
|||||||
'ask_limit_on_launch',
|
'ask_limit_on_launch',
|
||||||
'webhook_service',
|
'webhook_service',
|
||||||
'webhook_credential',
|
'webhook_credential',
|
||||||
|
'-execution_environment',
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_related(self, obj):
|
def get_related(self, obj):
|
||||||
@@ -3440,6 +3447,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
|||||||
survey_spec=self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
|
survey_spec=self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
|
||||||
copy=self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
|
copy=self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
|
||||||
)
|
)
|
||||||
|
res.pop('execution_environment', None) # EEs aren't meaningful for workflows
|
||||||
if obj.organization:
|
if obj.organization:
|
||||||
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
||||||
if obj.webhook_credential_id:
|
if obj.webhook_credential_id:
|
||||||
@@ -3491,6 +3499,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
|||||||
'allow_simultaneous',
|
'allow_simultaneous',
|
||||||
'job_template',
|
'job_template',
|
||||||
'is_sliced_job',
|
'is_sliced_job',
|
||||||
|
'-execution_environment',
|
||||||
'-execution_node',
|
'-execution_node',
|
||||||
'-event_processing_finished',
|
'-event_processing_finished',
|
||||||
'-controller_node',
|
'-controller_node',
|
||||||
@@ -3504,6 +3513,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
|||||||
|
|
||||||
def get_related(self, obj):
|
def get_related(self, obj):
|
||||||
res = super(WorkflowJobSerializer, self).get_related(obj)
|
res = super(WorkflowJobSerializer, self).get_related(obj)
|
||||||
|
res.pop('execution_environment', None) # EEs aren't meaningful for workflows
|
||||||
if obj.workflow_job_template:
|
if obj.workflow_job_template:
|
||||||
res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail', kwargs={'pk': obj.workflow_job_template.pk})
|
res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail', kwargs={'pk': obj.workflow_job_template.pk})
|
||||||
res['notifications'] = self.reverse('api:workflow_job_notifications_list', kwargs={'pk': obj.pk})
|
res['notifications'] = self.reverse('api:workflow_job_notifications_list', kwargs={'pk': obj.pk})
|
||||||
@@ -3528,7 +3538,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
|||||||
|
|
||||||
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
|
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
fields = ('*', '-execution_node', '-controller_node')
|
fields = ('*', '-execution_environment', '-execution_node', '-controller_node')
|
||||||
|
|
||||||
|
|
||||||
class WorkflowJobCancelSerializer(WorkflowJobSerializer):
|
class WorkflowJobCancelSerializer(WorkflowJobSerializer):
|
||||||
@@ -4178,7 +4188,7 @@ class JobLaunchSerializer(BaseSerializer):
|
|||||||
elif field_name == 'credentials':
|
elif field_name == 'credentials':
|
||||||
for cred in obj.credentials.all():
|
for cred in obj.credentials.all():
|
||||||
cred_dict = dict(id=cred.id, name=cred.name, credential_type=cred.credential_type.pk, passwords_needed=cred.passwords_needed)
|
cred_dict = dict(id=cred.id, name=cred.name, credential_type=cred.credential_type.pk, passwords_needed=cred.passwords_needed)
|
||||||
-        if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
+        if cred.credential_type.managed and 'vault_id' in cred.credential_type.defined_fields:
             cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
         defaults_dict.setdefault(field_name, []).append(cred_dict)
     else:
@@ -4766,7 +4776,7 @@ class InstanceSerializer(BaseSerializer):
 
     class Meta:
         model = Instance
-        read_only_fields = ('uuid', 'hostname', 'version')
+        read_only_fields = ('uuid', 'hostname', 'version', 'node_type')
         fields = (
             "id",
             "type",
@@ -4776,6 +4786,9 @@ class InstanceSerializer(BaseSerializer):
             "hostname",
             "created",
             "modified",
+            "last_seen",
+            "last_health_check",
+            "errors",
             'capacity_adjustment',
             "version",
             "capacity",
@@ -4789,12 +4802,15 @@ class InstanceSerializer(BaseSerializer):
             "mem_capacity",
             "enabled",
             "managed_by_policy",
+            "node_type",
         )
 
     def get_related(self, obj):
         res = super(InstanceSerializer, self).get_related(obj)
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
+        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
+            res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
         return res
 
     def get_consumed_capacity(self, obj):
@@ -4807,6 +4823,13 @@ class InstanceSerializer(BaseSerializer):
         return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100))
 
 
+class InstanceHealthCheckSerializer(BaseSerializer):
+    class Meta:
+        model = Instance
+        read_only_fields = ('uuid', 'hostname', 'version', 'last_health_check', 'errors', 'cpu', 'memory', 'cpu_capacity', 'mem_capacity', 'capacity')
+        fields = read_only_fields
+
+
 class InstanceGroupSerializer(BaseSerializer):
 
     show_capabilities = ['edit', 'delete']
@@ -4905,8 +4928,12 @@ class InstanceGroupSerializer(BaseSerializer):
         return value
 
     def validate_name(self, value):
-        if self.instance and self.instance.name == 'tower' and value != 'tower':
-            raise serializers.ValidationError(_('tower instance group name may not be changed.'))
+        if self.instance and self.instance.name == settings.DEFAULT_EXECUTION_QUEUE_NAME and value != settings.DEFAULT_EXECUTION_QUEUE_NAME:
+            raise serializers.ValidationError(_('%s instance group name may not be changed.' % settings.DEFAULT_EXECUTION_QUEUE_NAME))
+
+        if self.instance and self.instance.name == settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME and value != settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME:
+            raise serializers.ValidationError(_('%s instance group name may not be changed.' % settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME))
+
         return value
 
     def validate_credential(self, value):
@@ -4973,7 +5000,7 @@ class ActivityStreamSerializer(BaseSerializer):
         ('notification', ('id', 'status', 'notification_type', 'notification_template_id')),
         ('o_auth2_access_token', ('id', 'user_id', 'description', 'application_id', 'scope')),
         ('o_auth2_application', ('id', 'name', 'description')),
-        ('credential_type', ('id', 'name', 'description', 'kind', 'managed_by_tower')),
+        ('credential_type', ('id', 'name', 'description', 'kind', 'managed')),
         ('ad_hoc_command', ('id', 'name', 'status', 'limit')),
         ('workflow_approval', ('id', 'name', 'unified_job_id')),
     ]
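
The `validate_name` change above stops either default queue from being renamed. A minimal standalone sketch of that guard, with illustrative stand-in values for `settings.DEFAULT_EXECUTION_QUEUE_NAME` and `settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME`:

```python
# Standalone sketch of the rename guard; 'default' and 'controlplane' are
# assumed stand-ins for the two settings values, not confirmed constants.
PROTECTED_QUEUE_NAMES = ('default', 'controlplane')


def validate_instance_group_rename(current_name, new_name):
    """Mirror of validate_name: protected queues may keep only their own name."""
    for protected in PROTECTED_QUEUE_NAMES:
        if current_name == protected and new_name != protected:
            raise ValueError('%s instance group name may not be changed.' % protected)
    return new_name


assert validate_instance_group_rename('east-dc', 'west-dc') == 'west-dc'
try:
    validate_instance_group_rename('controlplane', 'cp-renamed')
except ValueError as exc:
    print(exc)  # controlplane instance group name may not be changed.
```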
@@ -0,0 +1 @@
+{% include "api/job_job_events_list.md" %}
awx/api/templates/api/instance_health_check.md (new file, 33 lines)
@@ -0,0 +1,33 @@
+{% ifmeth GET %}
+# Health Check Data
+
+Health checks are used to obtain important data about an instance.
+Instance fields affected by the health check are shown in this view.
+Fundamentally, health checks require running code on the machine in question.
+
+- For instances with `node_type` of "control" or "hybrid", health checks are
+  performed as part of a periodic task that runs in the background.
+- For instances with `node_type` of "execution", health checks are done by submitting
+  a work unit through the receptor mesh.
+
+If run through the receptor mesh, the invoked command is:
+
+```
+ansible-runner worker --worker-info
+```
+
+For execution nodes, these checks are _not_ performed on a regular basis.
+Health checks against functional nodes will be run when the node is first discovered.
+Health checks against nodes with errors will be repeated at a reduced frequency.
+
+{% endifmeth %}
+
+{% ifmeth POST %}
+# Manually Initiate a Health Check
+For purposes of error remediation or debugging, a health check can be
+manually initiated by making a POST request to this endpoint.
+
+This will submit the work unit to the target node through the receptor mesh and wait for it to finish.
+The model will be updated with the result.
+Up-to-date values of the fields will be returned in the response data.
+{% endifmeth %}
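
Putting the POST behavior described above together, a hedged client-side sketch; the host, credentials, and instance id are placeholders, while the path matches the `health_check` url added in this changeset:

```python
# Sketch of manually triggering a health check; requires a user with system
# admin or auditor rights. Host, credentials, and the instance id are assumed.
import requests

session = requests.Session()
session.auth = ('admin', 'password')  # placeholder credentials

resp = session.post('https://awx.example.com/api/v2/instances/1/health_check/')
resp.raise_for_status()
info = resp.json()
# Fields refreshed by the check, per InstanceHealthCheckSerializer
print(info.get('last_health_check'), info.get('errors'), info.get('capacity'))
```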
awx/api/templates/api/inventory_update_events_list.md (new file, 1 line)
@@ -0,0 +1 @@
+{% include "api/job_job_events_list.md" %}
awx/api/templates/api/job_job_events_list.md (new file, 21 lines)
@@ -0,0 +1,21 @@
+{% include "api/sub_list_api_view.md" %}
+{% ifmeth GET %}
+## Special limit feature for event list views
+
+Use the `limit` query string parameter to opt out of the pagination keys.
+Doing this can improve response times for jobs that produce a large volume
+of output.
+
+    ?limit=25
+
+This will set the page size to 25, and the `previous` and `next` keys will be
+omitted from the response data. The data structure will look like this:
+
+    {
+        "results": [
+            ...
+        ]
+    }
+
+
+{% endifmeth %}
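
For illustration, a small sketch of the `limit` feature described above (base URL, credentials, and job id are placeholders):

```python
# With ?limit=25 the response body carries only "results"; the pagination
# keys "previous"/"next" are omitted, per the template above.
import requests

resp = requests.get(
    'https://awx.example.com/api/v2/jobs/42/job_events/',
    params={'limit': 25},
    auth=('admin', 'password'),  # placeholder credentials
)
payload = resp.json()
assert 'next' not in payload and 'previous' not in payload
for event in payload['results']:
    print(event['id'], event['event'])
```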
@@ -1,25 +0,0 @@
-Make a GET request to retrieve the list of aggregated play data associated with a job
-
-## Filtering
-
-This endpoints supports a limited filtering subset:
-
-    ?event_id__in=1,2,3
-
-Will show only the given ids.
-
-    ?event_id__gt=1
-
-Will show ids greater than the given one.
-
-    ?event_id__lt=3
-
-Will show ids less than the given one.
-
-    ?failed=true
-
-Will show only failed plays. Alternatively `false` may be used.
-
-    ?play__icontains=test
-
-Will filter plays matching the substring `test`
@@ -1,27 +0,0 @@
-Make a GET request to retrieve the list of aggregated task data associated with the play given by event_id.
-
-`event_id` is a required query parameter and must match the job event id of the parent play in order to receive the list of tasks associated with the play
-
-## Filtering
-
-This endpoints supports a limited filtering subset:
-
-    ?event_id__in=1,2,3
-
-Will show only the given task ids under the play given by `event_id`.
-
-    ?event_id__gt=1
-
-Will show ids greater than the given one.
-
-    ?event_id__lt=3
-
-Will show ids less than the given one.
-
-    ?failed=true
-
-Will show only failed plays. Alternatively `false` may be used.
-
-    ?task__icontains=test
-
-Will filter tasks matching the substring `test`
awx/api/templates/api/project_update_events_list.md (new file, 1 line)
@@ -0,0 +1 @@
+{% include "api/job_job_events_list.md" %}
awx/api/templates/api/system_job_events_list.md (new file, 1 line)
@@ -0,0 +1 @@
+{% include "api/job_job_events_list.md" %}
@@ -3,11 +3,10 @@
 
 from django.conf.urls import url
 
-from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail
+from awx.api.views import AdHocCommandEventDetail
 
 
 urls = [
-    url(r'^$', AdHocCommandEventList.as_view(), name='ad_hoc_command_event_list'),
     url(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
 ]
 
@@ -16,7 +16,6 @@ from awx.api.views import (
     HostSmartInventoriesList,
     HostAdHocCommandsList,
     HostAdHocCommandEventsList,
-    HostInsights,
 )
 
 
@@ -33,7 +32,6 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'),
     url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'),
     url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'),
-    url(r'^(?P<pk>[0-9]+)/insights/$', HostInsights.as_view(), name='host_insights'),
 ]
 
 __all__ = ['urls']
@@ -3,7 +3,7 @@
 
 from django.conf.urls import url
 
-from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList
+from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck
 
 
 urls = [
@@ -11,6 +11,7 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
     url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
     url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
+    url(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
 ]
 
 __all__ = ['urls']
|||||||
@@ -3,14 +3,11 @@
|
|||||||
|
|
||||||
from django.conf.urls import url
|
from django.conf.urls import url
|
||||||
|
|
||||||
from awx.api.views import JobEventList, JobEventDetail, JobEventChildrenList, JobEventHostsList
|
from awx.api.views import JobEventDetail, JobEventChildrenList
|
||||||
|
|
||||||
|
|
||||||
urls = [
|
urls = [
|
||||||
url(r'^$', JobEventList.as_view(), name='job_event_list'),
|
|
||||||
url(r'^(?P<pk>[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'),
|
url(r'^(?P<pk>[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'),
|
||||||
url(r'^(?P<pk>[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'),
|
url(r'^(?P<pk>[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'),
|
||||||
url(r'^(?P<pk>[0-9]+)/hosts/$', JobEventHostsList.as_view(), name='job_event_hosts_list'),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
__all__ = ['urls']
|
__all__ = ['urls']
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ from urllib3.exceptions import ConnectTimeoutError
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import FieldError, ObjectDoesNotExist
|
from django.core.exceptions import FieldError, ObjectDoesNotExist
|
||||||
from django.db.models import Q, Sum
|
from django.db.models import Q, Sum
|
||||||
from django.db import IntegrityError, transaction, connection
|
from django.db import IntegrityError, ProgrammingError, transaction, connection
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
from django.utils.safestring import mark_safe
|
from django.utils.safestring import mark_safe
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@@ -90,17 +90,14 @@ from awx.main import models
 from awx.main.utils import (
     camelcase_to_underscore,
     extract_ansible_vars,
-    get_awx_http_client_headers,
     get_object_or_400,
     getattrd,
     get_pk_from_dict,
     schedule_task_manager,
     ignore_inventory_computed_fields,
-    set_environ,
 )
 from awx.main.utils.encryption import encrypt_value
 from awx.main.utils.filters import SmartFilter
-from awx.main.utils.insights import filter_insights_api_response
 from awx.main.redact import UriCleaner
 from awx.api.permissions import (
     JobTemplateCallbackPermission,
@@ -111,6 +108,7 @@ from awx.api.permissions import (
     InstanceGroupTowerPermission,
     VariableDataPermission,
     WorkflowApprovalPermission,
+    IsSystemAdminOrAuditor,
 )
 from awx.api import renderers
 from awx.api import serializers
@@ -172,11 +170,21 @@ from awx.api.views.root import ( # noqa
     ApiV2AttachView,
 )
 from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver # noqa
+from awx.api.pagination import UnifiedJobEventPagination
 
 
 logger = logging.getLogger('awx.api.views')
 
 
+def unpartitioned_event_horizon(cls):
+    with connection.cursor() as cursor:
+        try:
+            cursor.execute(f'SELECT MAX(id) FROM _unpartitioned_{cls._meta.db_table}')
+            return cursor.fetchone()[0] or -1
+        except ProgrammingError:
+            return 0
+
+
 def api_exception_handler(exc, context):
     """
     Override default API exception handler to catch IntegrityError exceptions.
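
`unpartitioned_event_horizon` returns the highest id left in the legacy `_unpartitioned_*` table (-1 when that table is empty, 0 when it no longer exists). A sketch of how a pk can then be routed between the two event models, which is how the views later in this diff use it:

```python
# Not the shipped code: a condensed illustration of the horizon dispatch.
def pick_event_model(pk, horizon, partitioned_model, unpartitioned_model):
    """Ids above the horizon live in the new partitioned table."""
    return partitioned_model if pk > horizon else unpartitioned_model


# Empty legacy table -> horizon -1 -> everything resolves to the new model.
assert pick_event_model(10, -1, 'JobEvent', 'UnpartitionedJobEvent') == 'JobEvent'
# Old rows up to id 100 still unmigrated -> low pks hit the legacy model.
assert pick_event_model(5, 100, 'JobEvent', 'UnpartitionedJobEvent') == 'UnpartitionedJobEvent'
```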
@@ -367,8 +375,8 @@ class InstanceDetail(RetrieveUpdateAPIView):
         r = super(InstanceDetail, self).update(request, *args, **kwargs)
         if status.is_success(r.status_code):
             obj = self.get_object()
-            obj.refresh_capacity()
-            obj.save()
+            obj.set_capacity_value()
+            obj.save(update_fields=['capacity'])
             r.data = serializers.InstanceSerializer(obj, context=self.get_serializer_context()).to_representation(obj)
         return r
 
@@ -395,6 +403,61 @@ class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAtta
     parent_model = models.Instance
     relationship = 'rampart_groups'
 
+    def is_valid_relation(self, parent, sub, created=False):
+        if parent.node_type == 'control':
+            return {'msg': _(f"Cannot change instance group membership of control-only node: {parent.hostname}.")}
+        return None
+
+
+class InstanceHealthCheck(GenericAPIView):
+
+    name = _('Instance Health Check')
+    model = models.Instance
+    serializer_class = serializers.InstanceHealthCheckSerializer
+    permission_classes = (IsSystemAdminOrAuditor,)
+
+    def get(self, request, *args, **kwargs):
+        obj = self.get_object()
+        data = self.get_serializer(data=request.data).to_representation(obj)
+        return Response(data, status=status.HTTP_200_OK)
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+
+        if obj.node_type == 'execution':
+            from awx.main.tasks import execution_node_health_check
+
+            runner_data = execution_node_health_check(obj.hostname)
+            obj.refresh_from_db()
+            data = self.get_serializer(data=request.data).to_representation(obj)
+            # Add in some extra unsaved fields
+            for extra_field in ('transmit_timing', 'run_timing'):
+                if extra_field in runner_data:
+                    data[extra_field] = runner_data[extra_field]
+        else:
+            from awx.main.tasks import cluster_node_health_check
+
+            if settings.CLUSTER_HOST_ID == obj.hostname:
+                cluster_node_health_check(obj.hostname)
+            else:
+                cluster_node_health_check.apply_async([obj.hostname], queue=obj.hostname)
+            start_time = time.time()
+            prior_check_time = obj.last_health_check
+            while time.time() - start_time < 50.0:
+                obj.refresh_from_db(fields=['last_health_check'])
+                if obj.last_health_check != prior_check_time:
+                    break
+                if time.time() - start_time < 1.0:
+                    time.sleep(0.1)
+                else:
+                    time.sleep(1.0)
+            else:
+                obj.mark_offline(errors=_('Health check initiated by user determined this instance to be unresponsive'))
+            obj.refresh_from_db()
+            data = self.get_serializer(data=request.data).to_representation(obj)
+
+        return Response(data, status=status.HTTP_200_OK)
+
+
 class InstanceGroupList(ListCreateAPIView):
 
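
The cluster branch of `post()` above waits up to about 50 seconds for `last_health_check` to move, polling tightly for the first second and then once per second. The same wait-for-change pattern in isolation, as a sketch:

```python
import time


def wait_for_change(read_value, refresh, timeout=50.0):
    """Poll until read_value() differs from its initial reading.

    refresh() stands in for obj.refresh_from_db(); returns True if a change
    was seen before the timeout, False otherwise (the mark-offline case).
    """
    initial = read_value()
    start = time.time()
    while time.time() - start < timeout:
        refresh()
        if read_value() != initial:
            return True
        # Tight 0.1s polling for the first second, then back off to 1s.
        time.sleep(0.1 if time.time() - start < 1.0 else 1.0)
    return False
```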
@@ -437,6 +500,11 @@ class InstanceGroupInstanceList(InstanceGroupMembershipMixin, SubListAttachDetac
     relationship = "instances"
     search_fields = ('hostname',)
 
+    def is_valid_relation(self, parent, sub, created=False):
+        if sub.node_type == 'control':
+            return {'msg': _(f"Cannot change instance group membership of control-only node: {sub.hostname}.")}
+        return None
+
 
 class ScheduleList(ListCreateAPIView):
 
@@ -685,6 +753,7 @@ class TeamAccessList(ResourceAccessList):
 
 class ExecutionEnvironmentList(ListCreateAPIView):
 
+    always_allow_superuser = False
     model = models.ExecutionEnvironment
     serializer_class = serializers.ExecutionEnvironmentSerializer
     swagger_topic = "Execution Environments"
@@ -692,10 +761,26 @@ class ExecutionEnvironmentList(ListCreateAPIView):
 
 class ExecutionEnvironmentDetail(RetrieveUpdateDestroyAPIView):
 
+    always_allow_superuser = False
     model = models.ExecutionEnvironment
     serializer_class = serializers.ExecutionEnvironmentSerializer
     swagger_topic = "Execution Environments"
 
+    def update(self, request, *args, **kwargs):
+        instance = self.get_object()
+        fields_to_check = ['name', 'description', 'organization', 'image', 'credential']
+        if instance.managed and request.user.can_access(models.ExecutionEnvironment, 'change', instance):
+            for field in fields_to_check:
+                if kwargs.get('partial') and field not in request.data:
+                    continue
+                left = getattr(instance, field, None)
+                if hasattr(left, 'id'):
+                    left = left.id
+                right = request.data.get(field)
+                if left != right:
+                    raise PermissionDenied(_("Only the 'pull' field can be edited for managed execution environments."))
+        return super().update(request, *args, **kwargs)
+
+
 class ExecutionEnvironmentJobTemplateList(SubListAPIView):
 
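
The `update()` override above leaves exactly one writable field on managed execution environments. A minimal sketch of the same rule over plain dicts (the field list mirrors `fields_to_check`; `PermissionError` stands in for DRF's `PermissionDenied`):

```python
RESTRICTED_FIELDS = ('name', 'description', 'organization', 'image', 'credential')


def check_managed_ee_patch(stored, patch):
    """Reject a partial update that changes anything besides 'pull'."""
    for field in RESTRICTED_FIELDS:
        if field not in patch:  # untouched fields are fine on a PATCH
            continue
        if patch[field] != stored.get(field):
            raise PermissionError("Only the 'pull' field can be edited for managed execution environments.")


stored = {'name': 'Managed EE', 'image': 'quay.io/ansible/awx-ee:latest', 'pull': 'missing'}
check_managed_ee_patch(stored, {'pull': 'always'})       # allowed
check_managed_ee_patch(stored, {'name': 'Managed EE'})   # same value: allowed
```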
@@ -878,11 +963,17 @@ class ProjectUpdateEventsList(SubListAPIView):
     relationship = 'project_update_events'
     name = _('Project Update Events List')
     search_fields = ('stdout',)
+    pagination_class = UnifiedJobEventPagination
 
     def finalize_response(self, request, response, *args, **kwargs):
         response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
         return super(ProjectUpdateEventsList, self).finalize_response(request, response, *args, **kwargs)
 
+    def get_queryset(self):
+        pu = self.get_parent_object()
+        self.check_parent_access(pu)
+        return pu.get_event_queryset()
+
 
 class SystemJobEventsList(SubListAPIView):
 
@@ -892,11 +983,17 @@ class SystemJobEventsList(SubListAPIView):
     relationship = 'system_job_events'
     name = _('System Job Events List')
     search_fields = ('stdout',)
+    pagination_class = UnifiedJobEventPagination
 
     def finalize_response(self, request, response, *args, **kwargs):
         response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
         return super(SystemJobEventsList, self).finalize_response(request, response, *args, **kwargs)
 
+    def get_queryset(self):
+        job = self.get_parent_object()
+        self.check_parent_access(job)
+        return job.get_event_queryset()
+
 
 class ProjectUpdateCancel(RetrieveAPIView):
 
@@ -1274,7 +1371,7 @@ class CredentialTypeDetail(RetrieveUpdateDestroyAPIView):
 
     def destroy(self, request, *args, **kwargs):
         instance = self.get_object()
-        if instance.managed_by_tower:
+        if instance.managed:
             raise PermissionDenied(detail=_("Deletion not allowed for managed credential types"))
         if instance.credentials.exists():
             raise PermissionDenied(detail=_("Credential types that are in use cannot be deleted"))
@@ -1389,7 +1486,7 @@ class CredentialDetail(RetrieveUpdateDestroyAPIView):
 
     def destroy(self, request, *args, **kwargs):
         instance = self.get_object()
-        if instance.managed_by_tower:
+        if instance.managed:
             raise PermissionDenied(detail=_("Deletion not allowed for managed credentials"))
         return super(CredentialDetail, self).destroy(request, *args, **kwargs)
 
@@ -1665,106 +1762,6 @@ class GatewayTimeout(APIException):
     default_code = 'gateway_timeout'
 
 
-class HostInsights(GenericAPIView):
-
-    model = models.Host
-    serializer_class = serializers.EmptySerializer
-
-    def _call_insights_api(self, url, session, headers):
-        try:
-            with set_environ(**settings.AWX_TASK_ENV):
-                res = session.get(url, headers=headers, timeout=120)
-        except requests.exceptions.SSLError:
-            raise BadGateway(_('SSLError while trying to connect to {}').format(url))
-        except requests.exceptions.Timeout:
-            raise GatewayTimeout(_('Request to {} timed out.').format(url))
-        except requests.exceptions.RequestException as e:
-            raise BadGateway(_('Unknown exception {} while trying to GET {}').format(e, url))
-
-        if res.status_code == 401:
-            raise BadGateway(_('Unauthorized access. Please check your Insights Credential username and password.'))
-        elif res.status_code != 200:
-            raise BadGateway(
-                _('Failed to access the Insights API at URL {}.' ' Server responded with {} status code and message {}').format(
-                    url, res.status_code, res.content
-                )
-            )
-
-        try:
-            return res.json()
-        except ValueError:
-            raise BadGateway(_('Expected JSON response from Insights at URL {}' ' but instead got {}').format(url, res.content))
-
-    def _get_session(self, username, password):
-        session = requests.Session()
-        session.auth = requests.auth.HTTPBasicAuth(username, password)
-
-        return session
-
-    def _get_platform_info(self, host, session, headers):
-        url = '{}/api/inventory/v1/hosts?insights_id={}'.format(settings.INSIGHTS_URL_BASE, host.insights_system_id)
-        res = self._call_insights_api(url, session, headers)
-        try:
-            res['results'][0]['id']
-        except (IndexError, KeyError):
-            raise NotFound(_('Could not translate Insights system ID {}' ' into an Insights platform ID.').format(host.insights_system_id))
-
-        return res['results'][0]
-
-    def _get_reports(self, platform_id, session, headers):
-        url = '{}/api/insights/v1/system/{}/reports/'.format(settings.INSIGHTS_URL_BASE, platform_id)
-
-        return self._call_insights_api(url, session, headers)
-
-    def _get_remediations(self, platform_id, session, headers):
-        url = '{}/api/remediations/v1/remediations?system={}'.format(settings.INSIGHTS_URL_BASE, platform_id)
-
-        remediations = []
-
-        # Iterate over all of the pages of content.
-        while url:
-            data = self._call_insights_api(url, session, headers)
-            remediations.extend(data['data'])
-
-            url = data['links']['next']  # Will be `None` if this is the last page.
-
-        return remediations
-
-    def _get_insights(self, host, session, headers):
-        platform_info = self._get_platform_info(host, session, headers)
-        platform_id = platform_info['id']
-        reports = self._get_reports(platform_id, session, headers)
-        remediations = self._get_remediations(platform_id, session, headers)
-
-        return {'insights_content': filter_insights_api_response(platform_info, reports, remediations)}
-
-    def get(self, request, *args, **kwargs):
-        host = self.get_object()
-        cred = None
-
-        if host.insights_system_id is None:
-            return Response(dict(error=_('This host is not recognized as an Insights host.')), status=status.HTTP_404_NOT_FOUND)
-
-        if host.inventory and host.inventory.insights_credential:
-            cred = host.inventory.insights_credential
-        else:
-            return Response(dict(error=_('The Insights Credential for "{}" was not found.').format(host.inventory.name)), status=status.HTTP_404_NOT_FOUND)
-
-        username = cred.get_input('username', default='')
-        password = cred.get_input('password', default='')
-        session = self._get_session(username, password)
-        headers = get_awx_http_client_headers()
-
-        data = self._get_insights(host, session, headers)
-        return Response(data, status=status.HTTP_200_OK)
-
-    def handle_exception(self, exc):
-        # Continue supporting the slightly different way we have handled error responses on this view.
-        response = super().handle_exception(exc)
-        response.data['error'] = response.data.pop('detail')
-        return response
-
-
 class GroupList(ListCreateAPIView):
 
     model = models.Group
@@ -3602,7 +3599,7 @@ class JobRelaunch(RetrieveAPIView):
                     status=status.HTTP_400_BAD_REQUEST,
                 )
             host_qs = obj.retry_qs(retry_hosts)
-            if not obj.job_events.filter(event='playbook_on_stats').exists():
+            if not obj.get_event_queryset().filter(event='playbook_on_stats').exists():
                 return Response(
                     {'hosts': _('Cannot retry on {status_value} hosts, playbook stats not available.').format(status_value=retry_hosts)},
                     status=status.HTTP_400_BAD_REQUEST,
@@ -3729,18 +3726,22 @@ class JobHostSummaryDetail(RetrieveAPIView):
     serializer_class = serializers.JobHostSummarySerializer
 
 
-class JobEventList(NoTruncateMixin, ListAPIView):
-
-    model = models.JobEvent
-    serializer_class = serializers.JobEventSerializer
-    search_fields = ('stdout',)
-
-
 class JobEventDetail(RetrieveAPIView):
 
-    model = models.JobEvent
     serializer_class = serializers.JobEventSerializer
 
+    @property
+    def is_partitioned(self):
+        if 'pk' not in self.kwargs:
+            return True
+        return int(self.kwargs['pk']) > unpartitioned_event_horizon(models.JobEvent)
+
+    @property
+    def model(self):
+        if self.is_partitioned:
+            return models.JobEvent
+        return models.UnpartitionedJobEvent
+
     def get_serializer_context(self):
         context = super().get_serializer_context()
         context.update(no_truncate=True)
@@ -3749,33 +3750,31 @@ class JobEventDetail(RetrieveAPIView):
 
 class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
 
-    model = models.JobEvent
     serializer_class = serializers.JobEventSerializer
-    parent_model = models.JobEvent
     relationship = 'children'
     name = _('Job Event Children List')
     search_fields = ('stdout',)
 
-    def get_queryset(self):
-        parent_event = self.get_parent_object()
-        self.check_parent_access(parent_event)
-        qs = self.request.user.get_queryset(self.model).filter(parent_uuid=parent_event.uuid)
-        return qs
+    @property
+    def is_partitioned(self):
+        if 'pk' not in self.kwargs:
+            return True
+        return int(self.kwargs['pk']) > unpartitioned_event_horizon(models.JobEvent)
 
+    @property
+    def model(self):
+        if self.is_partitioned:
+            return models.JobEvent
+        return models.UnpartitionedJobEvent
 
-class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
-
-    model = models.Host
-    serializer_class = serializers.HostSerializer
-    parent_model = models.JobEvent
-    relationship = 'hosts'
-    name = _('Job Event Hosts List')
+    @property
+    def parent_model(self):
+        return self.model
 
     def get_queryset(self):
         parent_event = self.get_parent_object()
         self.check_parent_access(parent_event)
-        qs = self.request.user.get_queryset(self.model).filter(job_events_as_primary_host=parent_event)
-        return qs
+        return parent_event.job.get_event_queryset().filter(parent_uuid=parent_event.uuid)
 
 
 class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
@@ -3811,12 +3810,12 @@ class GroupJobEventsList(BaseJobEventsList):
 class JobJobEventsList(BaseJobEventsList):
 
     parent_model = models.Job
+    pagination_class = UnifiedJobEventPagination
 
     def get_queryset(self):
         job = self.get_parent_object()
         self.check_parent_access(job)
-        qs = job.job_events.select_related('host').order_by('start_line')
-        return qs.all()
+        return job.get_event_queryset().select_related('host').order_by('start_line')
 
 
 class AdHocCommandList(ListCreateAPIView):
@@ -3968,13 +3967,6 @@ class AdHocCommandRelaunch(GenericAPIView):
         return Response(data, status=status.HTTP_201_CREATED, headers=headers)
 
 
-class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
-
-    model = models.AdHocCommandEvent
-    serializer_class = serializers.AdHocCommandEventSerializer
-    search_fields = ('stdout',)
-
-
 class AdHocCommandEventDetail(RetrieveAPIView):
 
     model = models.AdHocCommandEvent
@@ -3994,12 +3986,21 @@ class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
     relationship = 'ad_hoc_command_events'
     name = _('Ad Hoc Command Events List')
     search_fields = ('stdout',)
+    pagination_class = UnifiedJobEventPagination
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return parent.get_event_queryset()
 
 
 class HostAdHocCommandEventsList(BaseAdHocCommandEventsList):
 
     parent_model = models.Host
 
+    def get_queryset(self):
+        return super(BaseAdHocCommandEventsList, self).get_queryset()
+
+
 # class GroupJobEventsList(BaseJobEventsList):
 #     parent_model = Group
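
`HostAdHocCommandEventsList` above intentionally calls `super(BaseAdHocCommandEventsList, self).get_queryset()`, hopping over its own parent's override because a Host has no single parent command whose event table could be queried. An isolated illustration of that MRO jump:

```python
class Base:
    def get_queryset(self):
        return 'generic sublist queryset'


class EventsList(Base):
    def get_queryset(self):
        return 'single parent job event queryset'


class HostEventsList(EventsList):
    def get_queryset(self):
        # Start the MRO search *after* EventsList, landing on Base.
        return super(EventsList, self).get_queryset()


assert HostEventsList().get_queryset() == 'generic sublist queryset'
```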
@@ -38,6 +38,9 @@ from awx.api.serializers import (
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin
 
+from awx.api.pagination import UnifiedJobEventPagination
+
+
 logger = logging.getLogger('awx.api.views.organization')
 
 
@@ -49,6 +52,12 @@ class InventoryUpdateEventsList(SubListAPIView):
     relationship = 'inventory_update_events'
     name = _('Inventory Update Events List')
     search_fields = ('stdout',)
+    pagination_class = UnifiedJobEventPagination
+
+    def get_queryset(self):
+        iu = self.get_parent_object()
+        self.check_parent_access(iu)
+        return iu.get_event_queryset()
 
     def finalize_response(self, request, response, *args, **kwargs):
         response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
|||||||
@@ -52,6 +52,11 @@ class UnifiedJobDeletionMixin(object):
|
|||||||
else:
|
else:
|
||||||
# if it has been > 1 minute, events are probably lost
|
# if it has been > 1 minute, events are probably lost
|
||||||
logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
|
logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
|
||||||
|
|
||||||
|
# Manually cascade delete events if unpartitioned job
|
||||||
|
if obj.has_unpartitioned_events:
|
||||||
|
obj.get_event_queryset().delete()
|
||||||
|
|
||||||
obj.delete()
|
obj.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
@@ -63,13 +68,23 @@ class InstanceGroupMembershipMixin(object):
     membership.
     """
 
+    def attach_validate(self, request):
+        parent = self.get_parent_object()
+        sub_id, res = super().attach_validate(request)
+        if res:  # handle an error
+            return sub_id, res
+        sub = get_object_or_400(self.model, pk=sub_id)
+        attach_errors = self.is_valid_relation(parent, sub)
+        if attach_errors:
+            return sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST)
+        return sub_id, res
+
     def attach(self, request, *args, **kwargs):
         response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
         sub_id, res = self.attach_validate(request)
         if status.is_success(response.status_code):
             if self.parent_model is Instance:
-                ig_obj = get_object_or_400(self.model, pk=sub_id)
-                inst_name = ig_obj.hostname
+                inst_name = self.get_parent_object().hostname
             else:
                 inst_name = get_object_or_400(self.model, pk=sub_id).hostname
             with transaction.atomic():
@@ -86,11 +101,12 @@ class InstanceGroupMembershipMixin(object):
         return response
 
     def unattach_validate(self, request):
+        parent = self.get_parent_object()
         (sub_id, res) = super(InstanceGroupMembershipMixin, self).unattach_validate(request)
         if res:
             return (sub_id, res)
         sub = get_object_or_400(self.model, pk=sub_id)
-        attach_errors = self.is_valid_relation(None, sub)
+        attach_errors = self.is_valid_relation(parent, sub)
         if attach_errors:
             return (sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST))
         return (sub_id, res)
@@ -30,6 +30,7 @@ from awx.api.versioning import reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
+from awx.main.utils.licensing import get_licenser
 
 logger = logging.getLogger('awx.api.views.root')
 
@@ -106,7 +107,6 @@ class ApiVersionRootView(APIView):
         data['hosts'] = reverse('api:host_list', request=request)
         data['job_templates'] = reverse('api:job_template_list', request=request)
         data['jobs'] = reverse('api:job_list', request=request)
-        data['job_events'] = reverse('api:job_event_list', request=request)
         data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
         data['system_job_templates'] = reverse('api:system_job_template_list', request=request)
         data['system_jobs'] = reverse('api:system_job_list', request=request)
@@ -151,14 +151,22 @@ class ApiV2PingView(APIView):
         response['instances'] = []
         for instance in Instance.objects.all():
             response['instances'].append(
-                dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
+                dict(
+                    node=instance.hostname,
+                    node_type=instance.node_type,
+                    uuid=instance.uuid,
+                    heartbeat=instance.modified,
+                    capacity=instance.capacity,
+                    version=instance.version,
+                )
             )
-        sorted(response['instances'], key=operator.itemgetter('node'))
+        response['instances'] = sorted(response['instances'], key=operator.itemgetter('node'))
         response['instance_groups'] = []
         for instance_group in InstanceGroup.objects.prefetch_related('instances'):
             response['instance_groups'].append(
                 dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
             )
+        response['instance_groups'] = sorted(response['instance_groups'], key=lambda x: x['name'].lower())
         return Response(response)
 
 
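
The ping-view fix above corrects a classic slip: `sorted()` builds a new list and leaves its argument untouched, so the result must be assigned back. In miniature:

```python
instances = [{'node': 'b'}, {'node': 'a'}]
sorted(instances, key=lambda x: x['node'])  # return value discarded; no effect
assert instances[0]['node'] == 'b'

instances = sorted(instances, key=lambda x: x['node'])  # the actual fix
assert instances[0]['node'] == 'a'
```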
@@ -174,8 +182,6 @@ class ApiV2SubscriptionView(APIView):
             self.permission_denied(request)  # Raises PermissionDenied exception.
 
     def post(self, request):
-        from awx.main.utils.common import get_licenser
-
         data = request.data.copy()
         if data.get('subscriptions_password') == '$encrypted$':
             data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
@@ -223,7 +229,6 @@ class ApiV2AttachView(APIView):
         user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
         pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
         if pool_id and user and pw:
-            from awx.main.utils.common import get_licenser
 
             data = request.data.copy()
             try:
@@ -265,8 +270,6 @@ class ApiV2ConfigView(APIView):
     def get(self, request, format=None):
         '''Return various sitewide configuration settings'''
 
-        from awx.main.utils.common import get_licenser
-
         license_data = get_licenser().validate()
 
         if not license_data.get('valid_key', False):
@@ -302,7 +305,9 @@ class ApiV2ConfigView(APIView):
         ):
             data.update(
                 dict(
-                    project_base_dir=settings.PROJECTS_ROOT, project_local_paths=Project.get_local_path_choices(), custom_virtualenvs=get_custom_venv_choices()
+                    project_base_dir=settings.PROJECTS_ROOT,
+                    project_local_paths=Project.get_local_path_choices(),
+                    custom_virtualenvs=get_custom_venv_choices(),
                 )
             )
         elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
@@ -319,8 +324,6 @@ class ApiV2ConfigView(APIView):
             logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username))
             return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
 
-        from awx.main.utils.common import get_licenser
-
         license_data = json.loads(data_actual)
         if 'license_key' in license_data:
             return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
@@ -1,62 +0,0 @@
-import base64
-import hashlib
-
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives.ciphers import Cipher
-from cryptography.hazmat.primitives.ciphers.algorithms import AES
-from cryptography.hazmat.primitives.ciphers.modes import ECB
-
-
-__all__ = ['get_encryption_key', 'decrypt_field']
-
-
-def get_encryption_key(field_name, pk=None):
-    """
-    Generate key for encrypted password based on field name,
-    ``settings.SECRET_KEY``, and instance pk (if available).
-
-    :param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
-               can be omitted in situations where you're encrypting a setting
-               that is not database-persistent (like a read-only setting)
-    """
-    from django.conf import settings
-
-    h = hashlib.sha1()
-    h.update(settings.SECRET_KEY)
-    if pk is not None:
-        h.update(str(pk))
-    h.update(field_name)
-    return h.digest()[:16]
-
-
-def decrypt_value(encryption_key, value):
-    raw_data = value[len('$encrypted$') :]
-    # If the encrypted string contains a UTF8 marker, discard it
-    utf8 = raw_data.startswith('UTF8$')
-    if utf8:
-        raw_data = raw_data[len('UTF8$') :]
-    algo, b64data = raw_data.split('$', 1)
-    if algo != 'AES':
-        raise ValueError('unsupported algorithm: %s' % algo)
-    encrypted = base64.b64decode(b64data)
-    decryptor = Cipher(AES(encryption_key), ECB(), default_backend()).decryptor()
-    value = decryptor.update(encrypted) + decryptor.finalize()
-    value = value.rstrip('\x00')
-    # If the encrypted string contained a UTF8 marker, decode the data
-    if utf8:
-        value = value.decode('utf-8')
-    return value
-
-
-def decrypt_field(instance, field_name, subfield=None):
-    """
-    Return content of the given instance and field name decrypted.
-    """
-    value = getattr(instance, field_name)
-    if isinstance(value, dict) and subfield is not None:
-        value = value[subfield]
-    if not value or not value.startswith('$encrypted$'):
-        return value
-    key = get_encryption_key(field_name, getattr(instance, 'pk', None))
-
-    return decrypt_value(key, value)
@@ -23,8 +23,8 @@ import cachetools
 # AWX
 from awx.main.utils import encrypt_field, decrypt_field
 from awx.conf import settings_registry
+from awx.conf.fields import PrimaryKeyRelatedField
 from awx.conf.models import Setting
-from awx.conf.migrations._reencrypt import decrypt_field as old_decrypt_field
 
 # FIXME: Gracefully handle when settings are accessed before the database is
 # ready (or during migrations).
@@ -298,13 +298,7 @@ class SettingsWrapper(UserSettingsHolder):
                 continue
             if self.registry.is_setting_encrypted(setting.key):
                 setting_ids[setting.key] = setting.id
-                try:
-                    value = decrypt_field(setting, 'value')
-                except ValueError as e:
-                    # TODO: Remove in Tower 3.3
-                    logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
-                    value = old_decrypt_field(setting, 'value')
-
+                value = decrypt_field(setting, 'value')
             else:
                 value = setting.value
             settings_to_cache[setting.key] = get_cache_value(value)
@@ -420,9 +414,9 @@ class SettingsWrapper(UserSettingsHolder):
             raise ImproperlyConfigured('Setting "{}" is read only.'.format(name))
 
         try:
-            data = field.to_representation(value)
+            data = None if value is None and isinstance(field, PrimaryKeyRelatedField) else field.to_representation(value)
             setting_value = field.run_validation(data)
-            db_value = field.to_representation(setting_value)
+            db_value = None if setting_value is None and isinstance(field, PrimaryKeyRelatedField) else field.to_representation(setting_value)
         except Exception as e:
             logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
             raise e
@@ -23,7 +23,7 @@ from rest_framework import status
 
 # AWX
 from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
-from awx.api.permissions import IsSuperUser
+from awx.api.permissions import IsSystemAdminOrAuditor
 from awx.api.versioning import reverse
 from awx.main.utils import camelcase_to_underscore
 from awx.main.tasks import handle_setting_changes
@@ -150,7 +150,7 @@ class SettingLoggingTest(GenericAPIView):
     name = _('Logging Connectivity Test')
     model = Setting
     serializer_class = SettingSingletonSerializer
-    permission_classes = (IsSuperUser,)
+    permission_classes = (IsSystemAdminOrAuditor,)
     filter_backends = []
 
     def post(self, request, *args, **kwargs):
(7 file diffs suppressed because they are too large)
@@ -45,6 +45,7 @@ from awx.main.models import (
     InventoryUpdateEvent,
     Job,
     JobEvent,
+    UnpartitionedJobEvent,
     JobHostSummary,
     JobLaunchConfig,
     JobTemplate,
@@ -464,7 +465,7 @@ class BaseAccess(object):
             if display_method == 'schedule':
                 user_capabilities['schedule'] = user_capabilities['start']
                 continue
-            elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource)):
+            elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource, ExecutionEnvironment)):
                 user_capabilities['delete'] = user_capabilities['edit']
                 continue
             elif display_method == 'copy' and isinstance(obj, (Group, Host)):
@@ -866,13 +867,11 @@ class InventoryAccess(BaseAccess):
         # If no data is specified, just checking for generic add permission?
         if not data:
             return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
-        return self.check_related('organization', Organization, data, role_field='inventory_admin_role') and self.check_related(
-            'insights_credential', Credential, data, role_field='use_role'
-        )
+        return self.check_related('organization', Organization, data, role_field='inventory_admin_role')
 
     @check_superuser
     def can_change(self, obj, data):
-        return self.can_admin(obj, data) and self.check_related('insights_credential', Credential, data, obj=obj, role_field='use_role')
+        return self.can_admin(obj, data)
 
     @check_superuser
     def can_admin(self, obj, data):
@@ -1037,7 +1036,7 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
|
|||||||
|
|
||||||
def can_add(self, data):
|
def can_add(self, data):
|
||||||
if not data or 'inventory' not in data:
|
if not data or 'inventory' not in data:
|
||||||
return Organization.accessible_objects(self.user, 'admin_role').exists()
|
return Inventory.accessible_objects(self.user, 'admin_role').exists()
|
||||||
|
|
||||||
if not self.check_related('source_project', Project, data, role_field='use_role'):
|
if not self.check_related('source_project', Project, data, role_field='use_role'):
|
||||||
return False
|
return False
|
||||||
@@ -1120,7 +1119,7 @@ class CredentialTypeAccess(BaseAccess):
|
|||||||
I can create when:
|
I can create when:
|
||||||
- I'm a superuser:
|
- I'm a superuser:
|
||||||
I can change when:
|
I can change when:
|
||||||
- I'm a superuser and the type is not "managed by Tower"
|
- I'm a superuser and the type is not "managed"
|
||||||
"""
|
"""
|
||||||
|
|
||||||
model = CredentialType
|
model = CredentialType
|
||||||
@@ -1206,7 +1205,7 @@ class CredentialAccess(BaseAccess):
|
|||||||
def get_user_capabilities(self, obj, **kwargs):
|
def get_user_capabilities(self, obj, **kwargs):
|
||||||
user_capabilities = super(CredentialAccess, self).get_user_capabilities(obj, **kwargs)
|
user_capabilities = super(CredentialAccess, self).get_user_capabilities(obj, **kwargs)
|
||||||
user_capabilities['use'] = self.can_use(obj)
|
user_capabilities['use'] = self.can_use(obj)
|
||||||
if getattr(obj, 'managed_by_tower', False) is True:
|
if getattr(obj, 'managed', False) is True:
|
||||||
user_capabilities['edit'] = user_capabilities['delete'] = False
|
user_capabilities['edit'] = user_capabilities['delete'] = False
|
||||||
return user_capabilities
|
return user_capabilities
|
||||||
|
|
||||||
@@ -1369,6 +1368,8 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
|||||||
return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
|
return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
|
||||||
|
|
||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
|
if obj.managed:
|
||||||
|
raise PermissionDenied
|
||||||
return self.can_change(obj, None)
|
return self.can_change(obj, None)
|
||||||
|
|
||||||
|
|
||||||
@@ -2352,6 +2353,11 @@ class JobEventAccess(BaseAccess):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class UnpartitionedJobEventAccess(JobEventAccess):
|
||||||
|
|
||||||
|
model = UnpartitionedJobEvent
|
||||||
|
|
||||||
|
|
||||||
class ProjectUpdateEventAccess(BaseAccess):
|
class ProjectUpdateEventAccess(BaseAccess):
|
||||||
"""
|
"""
|
||||||
I can see project update event records whenever I can access the project update
|
I can see project update event records whenever I can access the project update
|
||||||
@@ -2895,3 +2901,4 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
|
|||||||
|
|
||||||
for cls in BaseAccess.__subclasses__():
|
for cls in BaseAccess.__subclasses__():
|
||||||
access_registry[cls.model] = cls
|
access_registry[cls.model] = cls
|
||||||
|
access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess
|
||||||
|
|||||||
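The explicit registration on the last added line is needed because __subclasses__() only reports direct subclasses, and UnpartitionedJobEventAccess derives from JobEventAccess rather than BaseAccess. A self-contained illustration of that Python behavior:

    class Base:
        pass

    class Child(Base):
        pass

    class GrandChild(Child):
        pass

    print(Base.__subclasses__())   # [<class 'Child'>] - GrandChild is not listed
    print(Child.__subclasses__())  # [<class 'GrandChild'>]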
@@ -6,7 +6,7 @@ import platform
 import distro

 from django.db import connection
-from django.db.models import Count, Max, Min
+from django.db.models import Count
 from django.conf import settings
 from django.contrib.sessions.models import Session
 from django.utils.timezone import now, timedelta
@@ -15,7 +15,7 @@ from django.utils.translation import ugettext_lazy as _
 from psycopg2.errors import UntranslatableCharacter

 from awx.conf.license import get_license
-from awx.main.utils import get_awx_version, get_custom_venv_choices, camelcase_to_underscore, datetime_hook
+from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook
 from awx.main import models
 from awx.main.analytics import register

@@ -58,7 +58,10 @@ def four_hour_slicing(key, since, until, last_gather):
     horizon = until - timedelta(weeks=4)
     last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
     last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
-    last_entry = max(last_entries.get(key) or last_gather, horizon)
+    try:
+        last_entry = max(last_entries.get(key) or last_gather, horizon)
+    except TypeError:  # last_entries has a stale non-datetime entry for this collector
+        last_entry = max(last_gather, horizon)

     start, end = last_entry, None
     while start < until:
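The new try/except exists because the retired pk-based events slicer (removed below) stored integer primary keys under the same AUTOMATION_ANALYTICS_LAST_ENTRIES key, and Python 3 refuses to order an int against a datetime. A minimal reproduction of the failure being guarded:

    import datetime

    horizon = datetime.datetime(2021, 3, 1)
    stale_entry = 100000  # an event pk written by the old slicer

    try:
        max(stale_entry, horizon)
    except TypeError as exc:
        print(exc)  # "'>' not supported between instances of 'datetime.datetime' and 'int'"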
@@ -67,7 +70,7 @@ def four_hour_slicing(key, since, until, last_gather):
         start = end


-def events_slicing(key, since, until, last_gather):
+def _identify_lower(key, since, until, last_gather):
     from awx.conf.models import Setting

     last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
@@ -77,16 +80,8 @@ def events_slicing(key, since, until, last_gather):
     lower = since or last_gather
     if not since and last_entries.get(key):
         lower = horizon
-    pk_values = models.JobEvent.objects.filter(created__gte=lower, created__lte=until).aggregate(Min('pk'), Max('pk'))
-
-    previous_pk = pk_values['pk__min'] - 1 if pk_values['pk__min'] is not None else 0
-    if not since and last_entries.get(key):
-        previous_pk = max(last_entries[key], previous_pk)
-    final_pk = pk_values['pk__max'] or 0
-
-    step = 100000
-    for start in range(previous_pk, final_pk + 1, step):
-        yield (start, min(start + step, final_pk))
+    return lower, last_entries

@@ -120,7 +115,7 @@ def config(since, **kwargs):
     }


-@register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects'))
+@register('counts', '1.1', description=_('Counts of objects such as organizations, inventories, and projects'))
 def counts(since, **kwargs):
     counts = {}
     for cls in (
@@ -138,9 +133,6 @@ def counts(since, **kwargs):
     ):
         counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()

-    venvs = get_custom_venv_choices()
-    counts['custom_virtualenvs'] = len([v for v in venvs if os.path.basename(v.rstrip('/')) != 'ansible'])
-
     inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
     inv_counts['normal'] = inv_counts.get('', 0)
     inv_counts.pop('', None)
@@ -183,12 +175,12 @@ def org_counts(since, **kwargs):
 def cred_type_counts(since, **kwargs):
     counts = {}
     for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
-        'name', 'id', 'managed_by_tower', 'num_credentials'
+        'name', 'id', 'managed', 'num_credentials'
     ):
         counts[cred_type['id']] = {
             'name': cred_type['name'],
             'credential_count': cred_type['num_credentials'],
-            'managed_by_tower': cred_type['managed_by_tower'],
+            'managed': cred_type['managed'],
         }
     return counts

@@ -335,39 +327,49 @@ def _copy_table(table, query, path):
     return file.file_list()


-@register('events_table', '1.2', format='csv', description=_('Automation task records'), expensive=events_slicing)
-def events_table(since, full_path, until, **kwargs):
+def _events_table(since, full_path, until, tbl, where_column, project_job_created=False, **kwargs):
     def query(event_data):
-        return f'''COPY (SELECT main_jobevent.id,
-                                main_jobevent.created,
-                                main_jobevent.modified,
-                                main_jobevent.uuid,
-                                main_jobevent.parent_uuid,
-                                main_jobevent.event,
-                                {event_data}->'task_action' AS task_action,
-                                (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
-                                main_jobevent.failed,
-                                main_jobevent.changed,
-                                main_jobevent.playbook,
-                                main_jobevent.play,
-                                main_jobevent.task,
-                                main_jobevent.role,
-                                main_jobevent.job_id,
-                                main_jobevent.host_id,
-                                main_jobevent.host_name,
-                                CAST({event_data}->>'start' AS TIMESTAMP WITH TIME ZONE) AS start,
-                                CAST({event_data}->>'end' AS TIMESTAMP WITH TIME ZONE) AS end,
-                                {event_data}->'duration' AS duration,
-                                {event_data}->'res'->'warnings' AS warnings,
-                                {event_data}->'res'->'deprecations' AS deprecations
-                         FROM main_jobevent
-                         WHERE (main_jobevent.id > {since} AND main_jobevent.id <= {until})
-                         ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER'''
+        query = f'''COPY (SELECT {tbl}.id,
+                                 {tbl}.created,
+                                 {tbl}.modified,
+                                 {tbl + '.job_created' if project_job_created else 'NULL'} as job_created,
+                                 {tbl}.uuid,
+                                 {tbl}.parent_uuid,
+                                 {tbl}.event,
+                                 task_action,
+                                 (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
+                                 {tbl}.failed,
+                                 {tbl}.changed,
+                                 {tbl}.playbook,
+                                 {tbl}.play,
+                                 {tbl}.task,
+                                 {tbl}.role,
+                                 {tbl}.job_id,
+                                 {tbl}.host_id,
+                                 {tbl}.host_name,
+                                 CAST(x.start AS TIMESTAMP WITH TIME ZONE) AS start,
+                                 CAST(x.end AS TIMESTAMP WITH TIME ZONE) AS end,
+                                 x.duration AS duration,
+                                 x.res->'warnings' AS warnings,
+                                 x.res->'deprecations' AS deprecations
+                          FROM {tbl}, json_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "start" text, "end" text)
+                          WHERE ({tbl}.{where_column} > '{since.isoformat()}' AND {tbl}.{where_column} <= '{until.isoformat()}')) TO STDOUT WITH CSV HEADER'''
+        return query

     try:
-        return _copy_table(table='events', query=query("main_jobevent.event_data::json"), path=full_path)
+        return _copy_table(table='events', query=query(f"{tbl}.event_data::json"), path=full_path)
     except UntranslatableCharacter:
-        return _copy_table(table='events', query=query("replace(main_jobevent.event_data::text, '\\u0000', '')::json"), path=full_path)
+        return _copy_table(table='events', query=query(f"replace({tbl}.event_data::text, '\\u0000', '')::json"), path=full_path)
+
+
+@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
+def events_table_unpartitioned(since, full_path, until, **kwargs):
+    return _events_table(since, full_path, until, '_unpartitioned_main_jobevent', 'created', **kwargs)
+
+
+@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
+def events_table_partitioned_modified(since, full_path, until, **kwargs):
+    return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs)


 @register('unified_jobs_table', '1.2', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing)
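The rewritten query leans on Postgres json_to_record() to project event_data into typed columns in a single pass instead of repeated -> extractions. An illustrative psycopg2 snippet of the same mechanism (the DSN and payload are hypothetical):

    import json
    import psycopg2

    conn = psycopg2.connect('dbname=awx user=awx')  # hypothetical DSN
    with conn.cursor() as cur:
        event_data = json.dumps({'res': {'warnings': []}, 'duration': '0.42', 'start': '2021-03-18T09:00:00Z'})
        # json_to_record requires an AS clause naming the columns and their types
        cur.execute(
            'SELECT x.duration, x.start FROM json_to_record(%s::json)'
            ' AS x("res" json, "duration" text, "start" text, "end" text)',
            (event_data,),
        )
        print(cur.fetchone())  # ('0.42', '2021-03-18T09:00:00Z')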
@@ -270,7 +270,8 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
             if not files:
                 if collection_type != 'dry-run':
                     with disable_activity_stream():
-                        last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
+                        entry = last_entries.get(key)
+                        last_entries[key] = max(entry, end) if entry and type(entry) == type(end) else end
                         settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
                 continue
@@ -293,7 +294,8 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti

             if slice_succeeded and collection_type != 'dry-run':
                 with disable_activity_stream():
-                    last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
+                    entry = last_entries.get(key)
+                    last_entries[key] = max(entry, end) if entry and type(entry) == type(end) else end
                     settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
     except Exception:
         succeeded = False
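The type(entry) == type(end) guard ties back to how AUTOMATION_ANALYTICS_LAST_ENTRIES is persisted: datetimes only survive the JSON round trip because DjangoJSONEncoder writes ISO strings and datetime_hook parses them back, while values written by the old pk-based slicer come back as plain ints. A sketch of that round trip, with a simplified stand-in for awx.main.utils.datetime_hook:

    import json
    from datetime import datetime
    from django.core.serializers.json import DjangoJSONEncoder

    entries = {'unified_jobs_table': datetime(2021, 3, 18, 9, 0), 'events_table': 100000}
    blob = json.dumps(entries, cls=DjangoJSONEncoder)

    def datetime_hook(d):  # simplified stand-in for the real hook
        out = {}
        for k, v in d.items():
            try:
                out[k] = datetime.fromisoformat(v)
            except (TypeError, ValueError):
                out[k] = v
        return out

    restored = json.loads(blob, object_hook=datetime_hook)
    # restored['unified_jobs_table'] is a datetime again; the stale pk stays an int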
@@ -39,7 +39,6 @@ def metrics():
     ],
     registry=REGISTRY,
 )
-CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs', registry=REGISTRY)
 RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the system', registry=REGISTRY)
 PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the system', registry=REGISTRY)
 STATUS = Gauge(
@@ -159,7 +158,6 @@ def metrics():
     HOST_COUNT.labels(type='active').set(current_counts['active_host_count'])

     SCHEDULE_COUNT.set(current_counts['schedule'])
-    CUSTOM_VENVS.set(current_counts['custom_virtualenvs'])

     USER_SESSIONS.labels(type='all').set(current_counts['active_sessions'])
     USER_SESSIONS.labels(type='user').set(current_counts['active_user_sessions'])
@@ -177,6 +177,24 @@ register(
     read_only=True,
 )

+register(
+    'DEFAULT_CONTROL_PLANE_QUEUE_NAME',
+    field_class=fields.CharField,
+    label=_('The instance group where control plane tasks run'),
+    category=_('System'),
+    category_slug='system',
+    read_only=True,
+)
+
+register(
+    'DEFAULT_EXECUTION_QUEUE_NAME',
+    field_class=fields.CharField,
+    label=_('The instance group where user jobs run (currently only on non-VM installs)'),
+    category=_('System'),
+    category_slug='system',
+    read_only=True,
+)
+
 register(
     'DEFAULT_EXECUTION_ENVIRONMENT',
     field_class=fields.PrimaryKeyRelatedField,
@@ -344,6 +362,17 @@ register(
     category_slug='jobs',
 )

+register(
+    'MAX_WEBSOCKET_EVENT_RATE',
+    field_class=fields.IntegerField,
+    min_value=0,
+    default=30,
+    label=_('Job Event Maximum Websocket Messages Per Second'),
+    help_text=_('Maximum number of messages to update the UI live job output with per second. Value of 0 means no limit.'),
+    category=_('Jobs'),
+    category_slug='jobs',
+)
+
 register(
     'SCHEDULE_MAX_JOBS',
     field_class=fields.IntegerField,
@@ -663,6 +692,15 @@ register(
     unit=_('seconds'),
 )

+register(
+    'IS_K8S',
+    field_class=fields.BooleanField,
+    read_only=True,
+    category=_('System'),
+    category_slug='system',
+    help_text=_('Indicates whether the instance is part of a kubernetes-based deployment.'),
+)
+

 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
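Once registered, these entries resolve through AWX's settings machinery like any Django setting; read-only ones such as IS_K8S simply reflect the deployment configuration. Illustrative usage, with the surrounding task code hypothetical:

    from django.conf import settings

    # Registered settings are read like normal Django settings; writable ones
    # can also be edited over the API under their category_slug.
    if getattr(settings, 'IS_K8S', False):
        queue = settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME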
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -41,6 +41,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
 }
 CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
 ACTIVE_STATES = CAN_CANCEL
+MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
 CENSOR_VALUE = '************'
 ENV_BLOCKLIST = frozenset(
     (
@@ -76,3 +77,7 @@ LOGGER_BLOCKLIST = (
     # loggers that may be called getting logging settings
     'awx.conf',
 )
+
+# Reported version for node seen in receptor mesh but for which capacity check
+# failed or is in progress
+RECEPTOR_PENDING = 'ansible-runner-???'

awx/main/credential_plugins/dsv.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+from .plugin import CredentialPlugin
+
+from django.conf import settings
+from django.utils.translation import ugettext_lazy as _
+from thycotic.secrets.vault import SecretsVault
+
+
+dsv_inputs = {
+    'fields': [
+        {
+            'id': 'tenant',
+            'label': _('Tenant'),
+            'help_text': _('The tenant e.g. "ex" when the URL is https://ex.secretservercloud.com'),
+            'type': 'string',
+        },
+        {
+            'id': 'tld',
+            'label': _('Top-level Domain (TLD)'),
+            'help_text': _('The TLD of the tenant e.g. "com" when the URL is https://ex.secretservercloud.com'),
+            'choices': ['ca', 'com', 'com.au', 'com.sg', 'eu'],
+            'default': 'com',
+        },
+        {'id': 'client_id', 'label': _('Client ID'), 'type': 'string'},
+        {
+            'id': 'client_secret',
+            'label': _('Client Secret'),
+            'type': 'string',
+            'secret': True,
+        },
+    ],
+    'metadata': [
+        {
+            'id': 'path',
+            'label': _('Secret Path'),
+            'type': 'string',
+            'help_text': _('The secret path e.g. /test/secret1'),
+        },
+    ],
+    'required': ['tenant', 'client_id', 'client_secret', 'path'],
+}
+
+if settings.DEBUG:
+    dsv_inputs['fields'].append(
+        {
+            'id': 'url_template',
+            'label': _('URL template'),
+            'type': 'string',
+            'default': 'https://{}.secretsvaultcloud.{}/v1',
+        }
+    )
+
+dsv_plugin = CredentialPlugin(
+    'Thycotic DevOps Secrets Vault',
+    dsv_inputs,
+    lambda **kwargs: SecretsVault(**{k: v for (k, v) in kwargs.items() if k in [field['id'] for field in dsv_inputs['fields']]}).get_secret(kwargs['path']),
+)
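CredentialPlugin here is AWX's (name, inputs, backend) tuple, so the lambda above is the lookup entry point. An illustrative call, with every value hypothetical:

    from awx.main.credential_plugins.dsv import dsv_plugin

    # The backend filters kwargs down to the declared input fields, builds a
    # SecretsVault client from them, and fetches the secret at 'path'.
    secret = dsv_plugin.backend(
        tenant='ex',
        tld='com',
        client_id='xxxxxxxx',
        client_secret='xxxxxxxx',
        path='/test/secret1',
    )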
@@ -63,7 +63,15 @@ base_inputs = {
         'id': 'secret_path',
         'label': _('Path to Secret'),
         'type': 'string',
-        'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/'),
+        'help_text': _(
+            (
+                'The path to the secret stored in the secret backend e.g, /some/secret/. It is recommended'
+                ' that you use the secret backend field to identify the storage backend and to use this field'
+                ' for locating a specific secret within that store. However, if you prefer to fully identify'
+                ' both the secret backend and one of its secrets using only this field, join their locations'
+                ' into a single path without any additional separators, e.g, /location/of/backend/some/secret.'
+            )
+        ),
     },
     {
         'id': 'auth_path',

awx/main/credential_plugins/tss.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+from .plugin import CredentialPlugin
+from django.utils.translation import ugettext_lazy as _
+
+from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret
+
+tss_inputs = {
+    'fields': [
+        {
+            'id': 'server_url',
+            'label': _('Secret Server URL'),
+            'help_text': _('The Base URL of Secret Server e.g. https://myserver/SecretServer or https://mytenant.secretservercloud.com'),
+            'type': 'string',
+        },
+        {
+            'id': 'username',
+            'label': _('Username'),
+            'help_text': _('The (Application) user username'),
+            'type': 'string',
+        },
+        {
+            'id': 'password',
+            'label': _('Password'),
+            'help_text': _('The corresponding password'),
+            'type': 'string',
+            'secret': True,
+        },
+    ],
+    'metadata': [
+        {
+            'id': 'secret_id',
+            'label': _('Secret ID'),
+            'help_text': _('The integer ID of the secret'),
+            'type': 'string',
+        },
+        {
+            'id': 'secret_field',
+            'label': _('Secret Field'),
+            'help_text': _('The field to extract from the secret'),
+            'type': 'string',
+        },
+    ],
+    'required': ['server_url', 'username', 'password', 'secret_id', 'secret_field'],
+}
+
+
+def tss_backend(**kwargs):
+    authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
+    secret_server = SecretServer(kwargs['server_url'], authorizer)
+    secret_dict = secret_server.get_secret(kwargs['secret_id'])
+    secret = ServerSecret(**secret_dict)
+
+    return secret.fields[kwargs['secret_field']]
+
+
+tss_plugin = CredentialPlugin(
+    'Thycotic Secret Server',
+    tss_inputs,
+    tss_backend,
+)
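As with the DSV plugin, tss_backend receives the credential's merged inputs and metadata as keyword arguments. An illustrative lookup, all values hypothetical:

    from awx.main.credential_plugins.tss import tss_backend

    # Authenticates with a password grant, fetches secret 42, returns one field.
    value = tss_backend(
        server_url='https://myserver/SecretServer',
        username='app-user',
        password='xxxxxxxx',
        secret_id=42,
        secret_field='password',
    )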
@@ -142,7 +142,8 @@ class CallbackBrokerWorker(BaseWorker):
                     logger.exception('Database Error Saving Job Event')
         duration_to_save = time.perf_counter() - duration_to_save
         for e in events:
-            emit_event_detail(e)
+            if not getattr(e, '_skip_websocket_message', False):
+                emit_event_detail(e)
         self.buff = {}
         self.last_flush = time.time()
         # only update metrics if we saved events
@@ -207,7 +208,13 @@ class CallbackBrokerWorker(BaseWorker):
             GuidMiddleware.set_guid('')
             return

+        skip_websocket_message = body.pop('skip_websocket_message', False)
+
         event = cls.create_from_data(**body)
+
+        if skip_websocket_message:
+            event._skip_websocket_message = True
+
         self.buff.setdefault(cls, []).append(event)

         retries = 0
@@ -642,7 +642,7 @@ class CredentialInputField(JSONSchemaField):

         # `ssh_key_unlock` requirements are very specific and can't be
         # represented without complicated JSON schema
-        if model_instance.credential_type.managed_by_tower is True and 'ssh_key_unlock' in defined_fields:
+        if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:

             # in order to properly test the necessity of `ssh_key_unlock`, we
             # need to know the real value of `ssh_key_data`; for a payload like:
@@ -711,7 +711,7 @@ class CredentialTypeInputField(JSONSchemaField):
     }

     def validate(self, value, model_instance):
-        if isinstance(value, dict) and 'dependencies' in value and not model_instance.managed_by_tower:
+        if isinstance(value, dict) and 'dependencies' in value and not model_instance.managed:
             raise django_exceptions.ValidationError(
                 _("'dependencies' is not supported for custom credentials."),
                 code='invalid',
@@ -4,11 +4,13 @@
 # Python
 import datetime
 import logging
+import pytz
+import re

 # Django
 from django.core.management.base import BaseCommand, CommandError
-from django.db import transaction
+from django.db import transaction, connection
 from django.utils.timezone import now

 # AWX
@@ -18,6 +20,132 @@ from awx.main.signals import disable_activity_stream, disable_computed_fields
 from awx.main.utils.deletion import AWXCollector, pre_delete


+def unified_job_class_to_event_table_name(job_class):
+    return f'main_{job_class().event_class.__name__.lower()}'
+
+
+def partition_table_name(job_class, dt):
+    suffix = dt.replace(microsecond=0, second=0, minute=0).strftime('%Y%m%d_%H')
+
+    event_tbl_name = unified_job_class_to_event_table_name(job_class)
+    event_tbl_name += f'_{suffix}'
+    return event_tbl_name
+
+
+def partition_name_dt(part_name):
+    """
+    part_name examples:
+      main_jobevent_20210318_09
+      main_projectupdateevent_20210318_11
+      main_inventoryupdateevent_20210318_03
+    """
+    if '_unpartitioned' in part_name:
+        return None
+    p = re.compile('([a-z]+)_([a-z]+)_([0-9]+)_([0-9][0-9])')
+    m = p.match(part_name)
+    if not m:
+        return m
+    dt_str = f"{m.group(3)}_{m.group(4)}"
+    dt = datetime.datetime.strptime(dt_str, '%Y%m%d_%H').replace(tzinfo=pytz.UTC)
+    return dt
+
+
+def dt_to_partition_name(tbl_name, dt):
+    return f"{tbl_name}_{dt.strftime('%Y%m%d_%H')}"
+
+
+class DeleteMeta:
+    def __init__(self, logger, job_class, cutoff, dry_run):
+        self.logger = logger
+        self.job_class = job_class
+        self.cutoff = cutoff
+        self.dry_run = dry_run
+
+        self.jobs_qs = None  # Set by find_jobs_to_delete()
+
+        self.parts_no_drop = set()  # Set by identify_excluded_partitions()
+        self.parts_to_drop = set()  # Set by find_partitions_to_drop()
+        self.jobs_pk_list = []  # Set by find_jobs_to_delete()
+        self.jobs_to_delete_count = 0  # Set by find_jobs_to_delete()
+        self.jobs_no_delete_count = 0  # Set by find_jobs_to_delete()
+
+    def find_jobs_to_delete(self):
+        self.jobs_qs = self.job_class.objects.filter(created__lt=self.cutoff).values_list('pk', 'status', 'created')
+        for pk, status, created in self.jobs_qs:
+            if status not in ['pending', 'waiting', 'running']:
+                self.jobs_to_delete_count += 1
+                self.jobs_pk_list.append(pk)
+        self.jobs_no_delete_count = (
+            self.job_class.objects.filter(created__gte=self.cutoff) | self.job_class.objects.filter(status__in=['pending', 'waiting', 'running'])
+        ).count()
+
+    def identify_excluded_partitions(self):
+        part_drop = {}
+
+        for pk, status, created in self.jobs_qs:
+            part_key = partition_table_name(self.job_class, created)
+            if status in ['pending', 'waiting', 'running']:
+                part_drop[part_key] = False
+            else:
+                part_drop.setdefault(part_key, True)
+
+        # Note that parts_no_drop _may_ contain the names of partitions that don't exist.
+        # This can happen when the cleanup of _unpartitioned_* logic leaves behind jobs with
+        # status pending, waiting, running. find_jobs_to_delete() will pick these jobs up.
+        self.parts_no_drop = set([k for k, v in part_drop.items() if v is False])
+
+    def delete_jobs(self):
+        if not self.dry_run:
+            self.job_class.objects.filter(pk__in=self.jobs_pk_list).delete()
+
+    def find_partitions_to_drop(self):
+        tbl_name = unified_job_class_to_event_table_name(self.job_class)
+
+        with connection.cursor() as cursor:
+            query = "SELECT inhrelid::regclass::text AS child FROM pg_catalog.pg_inherits"
+            query += f" WHERE inhparent = 'public.{tbl_name}'::regclass"
+            query += f" AND TO_TIMESTAMP(LTRIM(inhrelid::regclass::text, '{tbl_name}_'), 'YYYYMMDD_HH24') < '{self.cutoff}'"
+            query += " ORDER BY inhrelid::regclass::text"
+
+            cursor.execute(query)
+            partitions_from_db = [r[0] for r in cursor.fetchall()]
+
+        partitions_dt = [partition_name_dt(p) for p in partitions_from_db if p is not None]
+        partitions_dt = [p for p in partitions_dt if p is not None]
+
+        # convert datetime partition back to string partition
+        partitions_maybe_drop = set([dt_to_partition_name(tbl_name, dt) for dt in partitions_dt])
+
+        # Do not drop a partition if there is a job that will not be deleted pointing at it
+        self.parts_to_drop = partitions_maybe_drop - self.parts_no_drop
+
+    def drop_partitions(self):
+        if len(self.parts_to_drop) > 0:
+            parts_to_drop = list(self.parts_to_drop)
+            parts_to_drop.sort()  # sort it to make reading it easier for humans
+            parts_to_drop_str = ','.join(parts_to_drop)
+            if self.dry_run:
+                self.logger.debug(f"Would drop event partition(s) {parts_to_drop_str}")
+            else:
+                self.logger.debug(f"Dropping event partition(s) {parts_to_drop_str}")
+
+            if not self.dry_run:
+                with connection.cursor() as cursor:
+                    cursor.execute(f"DROP TABLE {parts_to_drop_str}")
+        else:
+            self.logger.debug("No event partitions to drop")
+
+    def delete(self):
+        self.find_jobs_to_delete()
+        self.identify_excluded_partitions()
+        self.find_partitions_to_drop()
+        self.drop_partitions()
+        self.delete_jobs()
+        return (self.jobs_no_delete_count, self.jobs_to_delete_count)
+
+
 class Command(BaseCommand):
     """
     Management command to cleanup old jobs and project updates.
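A quick sanity check of the partition-name round trip these helpers implement, using the docstring's own example:

    from awx.main.management.commands.cleanup_jobs import partition_name_dt, dt_to_partition_name

    name = 'main_jobevent_20210318_09'
    dt = partition_name_dt(name)  # datetime(2021, 3, 18, 9, 0, tzinfo=UTC)
    assert dt_to_partition_name('main_jobevent', dt) == name  # round trip holds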
@@ -36,6 +164,43 @@ class Command(BaseCommand):
         parser.add_argument('--notifications', dest='only_notifications', action='store_true', default=False, help='Remove notifications')
         parser.add_argument('--workflow-jobs', default=False, action='store_true', dest='only_workflow_jobs', help='Remove workflow jobs')

+    def cleanup(self, job_class):
+        delete_meta = DeleteMeta(self.logger, job_class, self.cutoff, self.dry_run)
+        skipped, deleted = delete_meta.delete()
+
+        return (delete_meta.jobs_no_delete_count, delete_meta.jobs_to_delete_count)
+
+    def cleanup_jobs_partition(self):
+        return self.cleanup(Job)
+
+    def cleanup_ad_hoc_commands_partition(self):
+        return self.cleanup(AdHocCommand)
+
+    def cleanup_project_updates_partition(self):
+        return self.cleanup(ProjectUpdate)
+
+    def cleanup_inventory_updates_partition(self):
+        return self.cleanup(InventoryUpdate)
+
+    def cleanup_management_jobs_partition(self):
+        return self.cleanup(SystemJob)
+
+    def cleanup_workflow_jobs_partition(self):
+        delete_meta = DeleteMeta(self.logger, WorkflowJob, self.cutoff, self.dry_run)
+
+        delete_meta.find_jobs_to_delete()
+        delete_meta.delete_jobs()
+        return (delete_meta.jobs_no_delete_count, delete_meta.jobs_to_delete_count)
+
+    def _cascade_delete_job_events(self, model, pk_list):
+        if len(pk_list) > 0:
+            with connection.cursor() as cursor:
+                tblname = unified_job_class_to_event_table_name(model)
+
+                pk_list_csv = ','.join(map(str, pk_list))
+                rel_name = model().event_parent_key
+                cursor.execute(f"DELETE FROM _unpartitioned_{tblname} WHERE {rel_name} IN ({pk_list_csv})")
+
     def cleanup_jobs(self):
         skipped, deleted = 0, 0

@@ -45,12 +210,14 @@ class Command(BaseCommand):
         # get queryset for available jobs to remove
         qs = Job.objects.filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running'])
         # get pk list for the first N (batch_size) objects
-        pk_list = qs[0:batch_size].values_list('pk')
+        pk_list = qs[0:batch_size].values_list('pk', flat=True)
         # You cannot delete queries with sql LIMIT set, so we must
         # create a new query from this pk_list
         qs_batch = Job.objects.filter(pk__in=pk_list)
         just_deleted = 0
         if not self.dry_run:
+            self._cascade_delete_job_events(Job, pk_list)
+
             del_query = pre_delete(qs_batch)
             collector = AWXCollector(del_query.db)
             collector.collect(del_query)
@@ -71,6 +238,7 @@ class Command(BaseCommand):
     def cleanup_ad_hoc_commands(self):
         skipped, deleted = 0, 0
         ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
+        pk_list = []
         for ad_hoc_command in ad_hoc_commands.iterator():
             ad_hoc_command_display = '"%s" (%d events)' % (str(ad_hoc_command), ad_hoc_command.ad_hoc_command_events.count())
             if ad_hoc_command.status in ('pending', 'waiting', 'running'):
@@ -81,15 +249,20 @@ class Command(BaseCommand):
             action_text = 'would delete' if self.dry_run else 'deleting'
             self.logger.info('%s %s', action_text, ad_hoc_command_display)
             if not self.dry_run:
+                pk_list.append(ad_hoc_command.pk)
                 ad_hoc_command.delete()
             deleted += 1
+
+        if not self.dry_run:
+            self._cascade_delete_job_events(AdHocCommand, pk_list)
+
         skipped += AdHocCommand.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted

     def cleanup_project_updates(self):
         skipped, deleted = 0, 0
         project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
+        pk_list = []
         for pu in project_updates.iterator():
             pu_display = '"%s" (type %s)' % (str(pu), str(pu.launch_type))
             if pu.status in ('pending', 'waiting', 'running'):
@@ -104,15 +277,20 @@ class Command(BaseCommand):
             action_text = 'would delete' if self.dry_run else 'deleting'
             self.logger.info('%s %s', action_text, pu_display)
             if not self.dry_run:
+                pk_list.append(pu.pk)
                 pu.delete()
             deleted += 1
+
+        if not self.dry_run:
+            self._cascade_delete_job_events(ProjectUpdate, pk_list)
+
         skipped += ProjectUpdate.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted

     def cleanup_inventory_updates(self):
         skipped, deleted = 0, 0
         inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
+        pk_list = []
         for iu in inventory_updates.iterator():
             iu_display = '"%s" (source %s)' % (str(iu), str(iu.source))
             if iu.status in ('pending', 'waiting', 'running'):
@@ -127,15 +305,20 @@ class Command(BaseCommand):
             action_text = 'would delete' if self.dry_run else 'deleting'
             self.logger.info('%s %s', action_text, iu_display)
             if not self.dry_run:
+                pk_list.append(iu.pk)
                 iu.delete()
             deleted += 1
+
+        if not self.dry_run:
+            self._cascade_delete_job_events(InventoryUpdate, pk_list)
+
         skipped += InventoryUpdate.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted

     def cleanup_management_jobs(self):
         skipped, deleted = 0, 0
         system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
+        pk_list = []
         for sj in system_jobs.iterator():
             sj_display = '"%s" (type %s)' % (str(sj), str(sj.job_type))
             if sj.status in ('pending', 'waiting', 'running'):
@@ -146,9 +329,13 @@ class Command(BaseCommand):
             action_text = 'would delete' if self.dry_run else 'deleting'
             self.logger.info('%s %s', action_text, sj_display)
             if not self.dry_run:
+                pk_list.append(sj.pk)
                 sj.delete()
             deleted += 1
+
+        if not self.dry_run:
+            self._cascade_delete_job_events(SystemJob, pk_list)
+
         skipped += SystemJob.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted

@@ -222,6 +409,13 @@ class Command(BaseCommand):
         for m in model_names:
             if m in models_to_cleanup:
                 skipped, deleted = getattr(self, 'cleanup_%s' % m)()
+
+                func = getattr(self, 'cleanup_%s_partition' % m, None)
+                if func:
+                    skipped_partition, deleted_partition = func()
+                    skipped += skipped_partition
+                    deleted += deleted_partition
+
                 if self.dry_run:
                     self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
                 else:
@@ -2,9 +2,8 @@
 # All Rights Reserved

 from django.core.management.base import BaseCommand
-from django.conf import settings
 from crum import impersonate
-from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate, ExecutionEnvironment
+from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields

@@ -45,7 +44,7 @@ class Command(BaseCommand):

         public_galaxy_credential = Credential(
             name='Ansible Galaxy',
-            managed_by_tower=True,
+            managed=True,
             credential_type=CredentialType.objects.get(kind='galaxy'),
             inputs={'url': 'https://galaxy.ansible.com/'},
         )
@@ -68,13 +67,6 @@ class Command(BaseCommand):
             print('Demo Credential, Inventory, and Job Template added.')
             changed = True

-        for ee in reversed(settings.DEFAULT_EXECUTION_ENVIRONMENTS):
-            _, created = ExecutionEnvironment.objects.update_or_create(name=ee['name'], defaults={'image': ee['image'], 'managed_by_tower': True})
-
-            if created:
-                changed = True
-                print('Default Execution Environment(s) registered.')
-
         if changed:
             print('(changed: True)')
         else:

awx/main/management/commands/custom_venv_associations.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+# Copyright (c) 2021 Ansible, Inc.
+# All Rights Reserved
+
+from django.core.management.base import BaseCommand
+from awx.main.utils.common import get_custom_venv_choices
+from awx.main.models import Organization, InventorySource, JobTemplate, Project
+import yaml
+
+
+class Command(BaseCommand):
+    """Returns the Job Templates, Organizations, Projects, and Inventory Sources associated with a custom venv path"""
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            'path',
+            type=str,
+            nargs=1,
+            default='',
+            help='run this with a path to a virtual environment as an argument to see the associated Job Templates, Organizations, Projects, and Inventory Sources.',
+        )
+        parser.add_argument('-q', action='store_true', help='run with -q to output only the results of the query.')
+
+    def handle(self, *args, **options):
+        # look up organizations and unified job templates (which include JTs, workflows, and inventory updates)
+        super(Command, self).__init__()
+        results = {}
+        path = options.get('path')
+        if path:
+            all_venvs = get_custom_venv_choices()
+            if path[0] in all_venvs:  # verify this is a valid path
+                path = path[0]
+                orgs = [{"name": org.name, "id": org.id} for org in Organization.objects.filter(custom_virtualenv=path)]
+                jts = [{"name": jt.name, "id": jt.id} for jt in JobTemplate.objects.filter(custom_virtualenv=path)]
+                proj = [{"name": proj.name, "id": proj.id} for proj in Project.objects.filter(custom_virtualenv=path)]
+                invsrc = [{"name": inv.name, "id": inv.id} for inv in InventorySource.objects.filter(custom_virtualenv=path)]
+                results["organizations"] = orgs
+                results["job_templates"] = jts
+                results["projects"] = proj
+                results["inventory_sources"] = invsrc
+                if not options.get('q'):
+                    msg = [
+                        '# Virtual Environments Associations:',
+                        yaml.dump(results),
+                        '- To list all (now deprecated) custom virtual environments run:',
+                        'awx-manage list_custom_venvs',
+                        '',
+                        '- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
+                        'awx-manage export_custom_venv /path/to/venv',
+                        '',
+                        '- Run these commands with `-q` to remove tool tips.',
+                        '',
+                    ]
+                    print('\n'.join(msg))
+                else:
+                    print(yaml.dump(results))
+            else:
+                print('\n', '# Incorrect path, verify your path is from the following list:')
+                print('\n'.join(all_venvs), '\n')

awx/main/management/commands/export_custom_venv.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+# Copyright (c) 2021 Ansible, Inc.
+# All Rights Reserved
+
+from awx.main.utils.common import get_custom_venv_pip_freeze, get_custom_venv_choices
+from django.core.management.base import BaseCommand
+
+
+class Command(BaseCommand):
+    """Returns the pip freeze from the path passed in the argument"""
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            'path',
+            type=str,
+            nargs=1,
+            default='',
+            help='run this with a path to a virtual environment as an argument to see the pip freeze data',
+        )
+        parser.add_argument('-q', action='store_true', help='run with -q to output only the results of the query.')
+
+    def handle(self, *args, **options):
+        super(Command, self).__init__()
+        if options.get('path'):
+            path = options.get('path')
+            all_venvs = get_custom_venv_choices()
+            if path[0] in all_venvs:
+                pip_data = get_custom_venv_pip_freeze(options.get('path')[0])
+                if pip_data:
+                    if not options.get('q'):
+                        msg = [
+                            '# Virtual environment contents:',
+                            pip_data,
+                            '- To list all (now deprecated) custom virtual environments run:',
+                            'awx-manage list_custom_venvs',
+                            '',
+                            '- To view the connections a (deprecated) virtual environment had in the database, run the following command while supplying the path as an argument:',
+                            'awx-manage custom_venv_associations /path/to/venv',
+                            '',
+                            '- Run these commands with `-q` to remove tool tips.',
+                            '',
+                        ]
+                        print('\n'.join(msg))
+                    else:
+                        print(pip_data)
+            else:
+                print('\n', '# Incorrect path, verify your path is from the following list:')
+                print('\n'.join(all_venvs))
@@ -10,6 +10,7 @@ import subprocess
 import sys
 import time
 import traceback
+from collections import OrderedDict

 # Django
 from django.conf import settings
@@ -36,20 +37,20 @@ from awx.main.utils.pglock import advisory_lock
 logger = logging.getLogger('awx.main.commands.inventory_import')

 LICENSE_EXPIRED_MESSAGE = '''\
-License expired.
-See http://www.ansible.com/renew for license extension information.'''
+Subscription expired.
+Contact us (https://www.redhat.com/contact) for subscription extension information.'''

 LICENSE_NON_EXISTANT_MESSAGE = '''\
-No license.
-See http://www.ansible.com/renew for license information.'''
+No subscription.
+Contact us (https://www.redhat.com/contact) for subscription information.'''

 LICENSE_MESSAGE = '''\
-Number of licensed instances exceeded, would bring available instances to %(new_count)d, system is licensed for %(instance_count)d.
-See http://www.ansible.com/renew for license extension information.'''
+%(new_count)d instances have been automated, system is subscribed for %(instance_count)d.
+Contact us (https://www.redhat.com/contact) for upgrade information.'''

 DEMO_LICENSE_MESSAGE = '''\
-Demo mode free license count exceeded, would bring available instances to %(new_count)d, demo mode allows %(instance_count)d.
-See http://www.ansible.com/renew for licensing information.'''
+Demo mode free subscription count exceeded. Current automated instances are %(new_count)d, demo mode allows %(instance_count)d.
+Contact us (https://www.redhat.com/contact) for subscription information.'''


 def functioning_dir(path):
@@ -66,13 +67,9 @@ class AnsibleInventoryLoader(object):
     /usr/bin/ansible/ansible-inventory -i hosts --list
     """

-    def __init__(self, source, venv_path=None, verbosity=0):
+    def __init__(self, source, verbosity=0):
         self.source = source
         self.verbosity = verbosity
-        if venv_path:
-            self.venv_path = venv_path
-        else:
-            self.venv_path = settings.ANSIBLE_VENV_PATH

     def get_base_args(self):
         bargs = ['podman', 'run', '--user=root', '--quiet']
@@ -131,7 +128,6 @@ class Command(BaseCommand):
     def add_arguments(self, parser):
         parser.add_argument('--inventory-name', dest='inventory_name', type=str, default=None, metavar='n', help='name of inventory to sync')
         parser.add_argument('--inventory-id', dest='inventory_id', type=int, default=None, metavar='i', help='id of inventory to sync')
-        parser.add_argument('--venv', dest='venv', type=str, default=None, help='absolute path to the AWX custom virtualenv to use')
         parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False, help='overwrite the destination hosts and groups')
         parser.add_argument('--overwrite-vars', dest='overwrite_vars', action='store_true', default=False, help='overwrite (rather than merge) variables')
         parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False, help='DEPRECATED legacy option, has no effect')
@@ -274,12 +270,13 @@ class Command(BaseCommand):
         self.db_instance_id_map = {}
         if self.instance_id_var:
             host_qs = self.inventory_source.hosts.all()
-            host_qs = host_qs.filter(instance_id='', variables__contains=self.instance_id_var.split('.')[0])
-            for host in host_qs:
-                instance_id = self._get_instance_id(host.variables_dict)
-                if not instance_id:
-                    continue
-                self.db_instance_id_map[instance_id] = host.pk
+            for instance_id_part in reversed(self.instance_id_var.split(',')):
+                host_qs = host_qs.filter(instance_id='', variables__contains=instance_id_part.split('.')[0])
+                for host in host_qs:
+                    instance_id = self._get_instance_id(host.variables_dict)
+                    if not instance_id:
+                        continue
+                    self.db_instance_id_map[instance_id] = host.pk
 
     def _build_mem_instance_id_map(self):
         """
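The loop above lets `instance_id_var` name several candidate variables, comma-separated and each optionally a dotted path. A minimal standalone sketch of that lookup (illustrative only; the real `_get_instance_id` may differ in details):

```python
# Hypothetical re-implementation of the comma/dot lookup, for illustration.
def resolve_instance_id(instance_id_var, variables):
    for candidate in instance_id_var.split(','):
        value = variables
        for part in candidate.split('.'):  # walk dotted paths like "compose.instance_id"
            if isinstance(value, dict) and part in value:
                value = value[part]
            else:
                value = None
                break
        if value:
            return str(value)
    return ''

print(resolve_instance_id('compose.instance_id,ec2_id', {'ec2_id': 'i-abc123'}))  # i-abc123
```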
@@ -305,7 +302,7 @@ class Command(BaseCommand):
             self._cached_host_pk_set = frozenset(self.inventory_source.hosts.values_list('pk', flat=True))
         return self._cached_host_pk_set
 
-    def _delete_hosts(self):
+    def _delete_hosts(self, pk_mem_host_map):
         """
         For each host in the database that is NOT in the local list, delete
         it. When importing from a cloud inventory source attached to a
@@ -314,25 +311,10 @@ class Command(BaseCommand):
         """
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
 
         hosts_qs = self.inventory_source.hosts
-        # Build list of all host pks, remove all that should not be deleted.
-        del_host_pks = set(self._existing_host_pks())  # makes mutable copy
-        if self.instance_id_var:
-            all_instance_ids = list(self.mem_instance_id_map.keys())
-            instance_ids = []
-            for offset in range(0, len(all_instance_ids), self._batch_size):
-                instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
-                for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
-                    del_host_pks.discard(host_pk)
-            for host_pk in set([v for k, v in self.db_instance_id_map.items() if k in instance_ids]):
-                del_host_pks.discard(host_pk)
-            all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
-        else:
-            all_host_names = list(self.all_group.all_hosts.keys())
-        for offset in range(0, len(all_host_names), self._batch_size):
-            host_names = all_host_names[offset : (offset + self._batch_size)]
-            for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
-                del_host_pks.discard(host_pk)
+        del_host_pks = hosts_qs.exclude(pk__in=pk_mem_host_map.keys()).values_list('pk', flat=True)
         # Now delete all remaining hosts in batches.
         all_del_pks = sorted(list(del_host_pks))
         for offset in range(0, len(all_del_pks), self._batch_size):
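The rewritten body above collapses the batched discard bookkeeping into a single queryset exclusion against the precomputed pk map; in plain Python the query is just a set difference:

```python
# Plain-Python equivalent of hosts_qs.exclude(pk__in=pk_mem_host_map.keys());
# the data is illustrative.
existing_pks = {1: 'web1', 2: 'db1', 3: 'stale'}   # pk -> name in the database
pk_mem_host_map = {1: 'web1', 2: 'db1'}            # pks matched to imported hosts

del_host_pks = [pk for pk in existing_pks if pk not in pk_mem_host_map]
print(del_host_pks)  # [3] -- only hosts absent from the import get deleted
```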
@@ -573,7 +555,63 @@ class Command(BaseCommand):
                 logger.debug('Host "%s" is now disabled', mem_host.name)
             self._batch_add_m2m(self.inventory_source.hosts, db_host)
 
-    def _create_update_hosts(self):
+    def _build_pk_mem_host_map(self):
+        """
+        Creates and returns a data structure that maps DB hosts to the in-memory hosts
+        that they correspond to - meaning that those hosts will be updated to the in-memory host values
+        """
+        mem_host_pk_map = OrderedDict()  # keys are mem_host name, values are matching DB host pk
+        host_pks_updated = set()  # same as items of mem_host_pk_map but used for efficiency
+        mem_host_pk_map_by_id = {}  # incomplete mapping by new instance_id to be sorted and pushed to mem_host_pk_map
+        mem_host_instance_id_map = {}
+        for k, v in self.all_group.all_hosts.items():
+            instance_id = self._get_instance_id(v.variables)
+            if instance_id in self.db_instance_id_map:
+                mem_host_pk_map_by_id[self.db_instance_id_map[instance_id]] = v
+            elif instance_id:
+                mem_host_instance_id_map[instance_id] = v
+
+        # Update all existing hosts where we know the PK based on instance_id.
+        all_host_pks = sorted(mem_host_pk_map_by_id.keys())
+        for offset in range(0, len(all_host_pks), self._batch_size):
+            host_pks = all_host_pks[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk').filter(pk__in=host_pks):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = mem_host_pk_map_by_id[db_host.pk]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Update all existing hosts where we know the DB (the prior) instance_id.
+        all_instance_ids = sorted(mem_host_instance_id_map.keys())
+        for offset in range(0, len(all_instance_ids), self._batch_size):
+            instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk', 'instance_id').filter(instance_id__in=instance_ids):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = mem_host_instance_id_map[db_host.instance_id]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Update all existing hosts by name.
+        all_host_names = sorted(self.all_group.all_hosts.keys())
+        for offset in range(0, len(all_host_names), self._batch_size):
+            host_names = all_host_names[offset : (offset + self._batch_size)]
+            for db_host in self.inventory.hosts.only('pk', 'name').filter(name__in=host_names):
+                if db_host.pk in host_pks_updated:
+                    continue
+                mem_host = self.all_group.all_hosts[db_host.name]
+                mem_host_pk_map[mem_host.name] = db_host.pk
+                host_pks_updated.add(db_host.pk)
+
+        # Rotate the dictionary so that lookups are done by the host pk
+        pk_mem_host_map = OrderedDict()
+        for name, host_pk in mem_host_pk_map.items():
+            pk_mem_host_map[host_pk] = name
+
+        return pk_mem_host_map  # keys are DB host pk, values are matching mem host name
+
+    def _create_update_hosts(self, pk_mem_host_map):
         """
         For each host in the local list, create it if it doesn't exist in the
         database. Otherwise, update/replace database variables from the
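In miniature, `_build_pk_mem_host_map` matches database hosts to imported hosts in passes of decreasing confidence: by the pk already mapped from the current instance_id, then by the instance_id stored on the row, then by name, never claiming the same pk twice. A toy version (not AWX code):

```python
# Toy model of the multi-pass matching; each pass skips pks already claimed.
db_hosts = [
    {'pk': 1, 'name': 'web1', 'instance_id': 'i-111'},
    {'pk': 2, 'name': 'db1', 'instance_id': ''},
]
mem_hosts = {'web1-renamed': 'i-111', 'db1': ''}  # imported name -> instance_id

pk_mem_host_map, claimed = {}, set()
for match in (
    lambda h, name, iid: iid and h['instance_id'] == iid,  # earlier passes: by instance_id
    lambda h, name, iid: h['name'] == name,                # final pass: by name
):
    for name, iid in mem_hosts.items():
        for h in db_hosts:
            if h['pk'] not in claimed and match(h, name, iid):
                pk_mem_host_map[h['pk']] = name
                claimed.add(h['pk'])
                break

print(pk_mem_host_map)  # {1: 'web1-renamed', 2: 'db1'}
```

A renamed host that still carries its old instance_id is therefore updated in place rather than deleted and recreated.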
@@ -582,57 +620,22 @@ class Command(BaseCommand):
         """
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
-        host_pks_updated = set()
-        mem_host_pk_map = {}
-        mem_host_instance_id_map = {}
-        mem_host_name_map = {}
-        mem_host_names_to_update = set(self.all_group.all_hosts.keys())
-        for k, v in self.all_group.all_hosts.items():
-            mem_host_name_map[k] = v
-            instance_id = self._get_instance_id(v.variables)
-            if instance_id in self.db_instance_id_map:
-                mem_host_pk_map[self.db_instance_id_map[instance_id]] = v
-            elif instance_id:
-                mem_host_instance_id_map[instance_id] = v
 
-        # Update all existing hosts where we know the PK based on instance_id.
-        all_host_pks = sorted(mem_host_pk_map.keys())
+        updated_mem_host_names = set()
+        all_host_pks = sorted(pk_mem_host_map.keys())
         for offset in range(0, len(all_host_pks), self._batch_size):
             host_pks = all_host_pks[offset : (offset + self._batch_size)]
             for db_host in self.inventory.hosts.filter(pk__in=host_pks):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_pk_map[db_host.pk]
+                mem_host_name = pk_mem_host_map[db_host.pk]
+                mem_host = self.all_group.all_hosts[mem_host_name]
                 self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
+                updated_mem_host_names.add(mem_host.name)
 
-        # Update all existing hosts where we know the instance_id.
-        all_instance_ids = sorted(mem_host_instance_id_map.keys())
-        for offset in range(0, len(all_instance_ids), self._batch_size):
-            instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
-            for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_instance_id_map[db_host.instance_id]
-                self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
-
-        # Update all existing hosts by name.
-        all_host_names = sorted(mem_host_name_map.keys())
-        for offset in range(0, len(all_host_names), self._batch_size):
-            host_names = all_host_names[offset : (offset + self._batch_size)]
-            for db_host in self.inventory.hosts.filter(name__in=host_names):
-                if db_host.pk in host_pks_updated:
-                    continue
-                mem_host = mem_host_name_map[db_host.name]
-                self._update_db_host_from_mem_host(db_host, mem_host)
-                host_pks_updated.add(db_host.pk)
-                mem_host_names_to_update.discard(mem_host.name)
+        mem_host_names_to_create = set(self.all_group.all_hosts.keys()) - updated_mem_host_names
 
         # Create any new hosts.
-        for mem_host_name in sorted(mem_host_names_to_update):
+        for mem_host_name in sorted(mem_host_names_to_create):
             mem_host = self.all_group.all_hosts[mem_host_name]
             import_vars = mem_host.variables
             host_desc = import_vars.pop('_awx_description', 'imported')
@@ -731,13 +734,14 @@ class Command(BaseCommand):
         self._batch_size = 500
         self._build_db_instance_id_map()
         self._build_mem_instance_id_map()
+        pk_mem_host_map = self._build_pk_mem_host_map()
         if self.overwrite:
-            self._delete_hosts()
+            self._delete_hosts(pk_mem_host_map)
             self._delete_groups()
             self._delete_group_children_and_hosts()
         self._update_inventory()
         self._create_update_groups()
-        self._create_update_hosts()
+        self._create_update_hosts(pk_mem_host_map)
         self._create_update_group_children()
         self._create_update_group_hosts()
 
@@ -761,29 +765,22 @@ class Command(BaseCommand):
         instance_count = license_info.get('instance_count', 0)
         free_instances = license_info.get('free_instances', 0)
         time_remaining = license_info.get('time_remaining', 0)
+        automated_count = license_info.get('automated_instances', 0)
         hard_error = license_info.get('trial', False) is True or license_info['instance_count'] == 10
-        new_count = Host.objects.active_count()
         if time_remaining <= 0:
             if hard_error:
                 logger.error(LICENSE_EXPIRED_MESSAGE)
-                raise PermissionDenied("License has expired!")
+                raise PermissionDenied("Subscription has expired!")
             else:
                 logger.warning(LICENSE_EXPIRED_MESSAGE)
-        # special check for tower-type inventory sources
-        # but only if running the plugin
-        TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
-        if self.inventory_source.source == 'tower' and any(f in self.inventory_source.source_path for f in TOWER_SOURCE_FILES):
-            # only if this is the 2nd call to license check, we cannot compare before running plugin
-            if hasattr(self, 'all_group'):
-                self.remote_tower_license_compare(local_license_type)
         if free_instances < 0:
             d = {
-                'new_count': new_count,
+                'new_count': automated_count,
                 'instance_count': instance_count,
             }
             if hard_error:
                 logger.error(LICENSE_MESSAGE % d)
-                raise PermissionDenied('License count exceeded!')
+                raise PermissionDenied('Subscription count exceeded!')
             else:
                 logger.warning(LICENSE_MESSAGE % d)
 
@@ -824,7 +821,6 @@ class Command(BaseCommand):
             raise CommandError('--source is required')
         verbosity = int(options.get('verbosity', 1))
         self.set_logging_level(verbosity)
-        venv_path = options.get('venv', None)
 
         # Load inventory object based on name or ID.
         if inventory_id:
@@ -854,7 +850,7 @@ class Command(BaseCommand):
             _eager_fields=dict(job_args=json.dumps(sys.argv), job_env=dict(os.environ.items()), job_cwd=os.getcwd())
         )
 
-        data = AnsibleInventoryLoader(source=source, venv_path=venv_path, verbosity=verbosity).load()
+        data = AnsibleInventoryLoader(source=source, verbosity=verbosity).load()
 
         logger.debug('Finished loading from source: %s', source)
 
awx/main/management/commands/list_custom_venvs.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+# Copyright (c) 2021 Ansible, Inc.
+# All Rights Reserved
+import sys
+
+from awx.main.utils.common import get_custom_venv_choices
+from django.core.management.base import BaseCommand
+from django.conf import settings
+
+
+class Command(BaseCommand):
+    """Returns a list of custom venv paths from the path passed in the argument"""
+
+    def add_arguments(self, parser):
+        parser.add_argument('-q', action='store_true', help='run with -q to output only the results of the query.')
+
+    def handle(self, *args, **options):
+        super(Command, self).__init__()
+        venvs = get_custom_venv_choices()
+        if venvs:
+            if not options.get('q'):
+                msg = [
+                    '# Discovered Virtual Environments:',
+                    '\n'.join(venvs),
+                    '',
+                    '- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
+                    'awx-manage export_custom_venv /path/to/venv',
+                    '',
+                    '- To view the connections a (deprecated) virtual environment had in the database, run the following command while supplying the path as an argument:',
+                    'awx-manage custom_venv_associations /path/to/venv',
+                    '',
+                    '- Run these commands with `-q` to remove tool tips.',
+                    '',
+                ]
+                print('\n'.join(msg))
+            else:
+                print('\n'.join(venvs), '\n')
+        else:
+            msg = ["No custom virtual environments detected in:", settings.BASE_VENV_PATH]
+
+            for path in settings.CUSTOM_VENV_PATHS:
+                msg.append(path)
+
+            print('\n'.join(msg), file=sys.stderr)
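The new command can be exercised from Python as well as through the awx-manage CLI shown in its own help text; a small example, assuming a configured AWX/Django environment:

```python
# Assumes DJANGO_SETTINGS_MODULE points at a configured AWX install.
from django.core.management import call_command

# q=True corresponds to the -q flag: print only the venv paths, one per line.
call_command('list_custom_venvs', q=True)
```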
@@ -25,6 +25,7 @@ class Command(BaseCommand):
 
     def handle(self, *args, **options):
         super(Command, self).__init__()
+        no_color = options.get("no_color", False)
 
         groups = list(InstanceGroup.objects.all())
         ungrouped = Ungrouped()
@@ -44,6 +45,8 @@ class Command(BaseCommand):
                 color = '\033[91m'
             if x.enabled is False:
                 color = '\033[90m[DISABLED] '
+            if no_color:
+                color = ''
             fmt = '\t' + color + '{0.hostname} capacity={0.capacity} version={1}'
             if x.capacity:
                 fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
@@ -1,7 +1,6 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved
 
-from django.conf import settings
 from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction
 
@@ -18,11 +17,13 @@ class Command(BaseCommand):
 
     def add_arguments(self, parser):
         parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')
+        parser.add_argument('--node_type', type=str, default="hybrid", choices=["control", "execution", "hybrid"], help='Instance Node type')
+        parser.add_argument('--uuid', type=str, help='Instance UUID')
 
-    def _register_hostname(self, hostname):
+    def _register_hostname(self, hostname, node_type, uuid):
         if not hostname:
             return
-        (changed, instance) = Instance.objects.register(uuid=self.uuid, hostname=hostname)
+        (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, uuid=uuid)
         if changed:
             print('Successfully registered instance {}'.format(hostname))
         else:
@@ -33,8 +34,7 @@ class Command(BaseCommand):
     def handle(self, **options):
         if not options.get('hostname'):
             raise CommandError("Specify `--hostname` to use this command.")
-        self.uuid = settings.SYSTEM_UUID
         self.changed = False
-        self._register_hostname(options.get('hostname'))
+        self._register_hostname(options.get('hostname'), options.get('node_type'), options.get('uuid'))
         if self.changed:
             print('(changed: True)')
@@ -0,0 +1,135 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+import sys
+from distutils.util import strtobool
+from argparse import RawTextHelpFormatter
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+from awx.main.models import CredentialType, Credential, ExecutionEnvironment
+
+
+class Command(BaseCommand):
+    """Create default execution environments, intended for new installs"""
+
+    help = """
+    Creates or updates the execution environments set in settings.DEFAULT_EXECUTION_ENVIRONMENTS if they are not yet created.
+    Optionally provide authentication details to create or update a container registry credential that will be set on all of these default execution environments.
+    Note that settings.DEFAULT_EXECUTION_ENVIRONMENTS is an ordered list; the first in the list will be used for project updates.
+    """
+
+    # Preserves newlines in the help text
+    def create_parser(self, *args, **kwargs):
+        parser = super(Command, self).create_parser(*args, **kwargs)
+        parser.formatter_class = RawTextHelpFormatter
+        return parser
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "--registry-url",
+            type=str,
+            default="",
+            help="URL for the container registry",
+        )
+        parser.add_argument(
+            "--registry-username",
+            type=str,
+            default="",
+            help="username for the container registry",
+        )
+        parser.add_argument(
+            "--registry-password",
+            type=str,
+            default="",
+            help="Password or token for CLI authentication with the container registry",
+        )
+        parser.add_argument(
+            "--verify-ssl",
+            type=lambda x: bool(strtobool(str(x))),
+            default=True,
+            help="Verify SSL when authenticating with the container registry",
+        )
+
+    def handle(self, *args, **options):
+        changed = False
+        registry_cred = None
+
+        if options.get("registry_username"):
+            if not options.get("registry_password"):
+                sys.stderr.write("Registry password must be provided when providing registry username\n")
+                sys.exit(1)
+
+            if not options.get("registry_url"):
+                sys.stderr.write("Registry url must be provided when providing registry username\n")
+                sys.exit(1)
+
+            registry_cred_type = CredentialType.objects.filter(kind="registry")
+            if not registry_cred_type.exists():
+                sys.stderr.write("No registry credential type found")
+                sys.exit(1)
+
+            inputs = {
+                "host": options.get("registry_url"),
+                "password": options.get("registry_password"),
+                "username": options.get("registry_username"),
+                "verify_ssl": options.get("verify_ssl"),
+            }
+            registry_cred, cred_created = Credential.objects.get_or_create(
+                name="Default Execution Environment Registry Credential",
+                managed=True,
+                credential_type=registry_cred_type[0],
+                defaults={'inputs': inputs},
+            )
+
+            if cred_created:
+                changed = True
+                print("'Default Execution Environment Credential' registered.")
+
+            for key, value in inputs.items():
+                if not registry_cred.inputs.get(key) or registry_cred.get_input(key) != value:
+                    registry_cred.inputs[key] = value
+                    changed = True
+
+            if changed:
+                registry_cred.save()
+                print("'Default Execution Environment Credential' updated.")
+
+        # Create default globally available Execution Environments
+        for ee in reversed(settings.GLOBAL_JOB_EXECUTION_ENVIRONMENTS):
+            _this_ee, ee_created = ExecutionEnvironment.objects.get_or_create(name=ee["name"], defaults={'image': ee["image"], 'credential': registry_cred})
+            if ee_created:
+                changed = True
+                print(f"'{ee['name']}' Default Execution Environment registered.")
+            else:
+                if _this_ee.image != ee["image"]:
+                    _this_ee.image = ee["image"]
+                    changed = True
+                if _this_ee.credential != registry_cred:
+                    _this_ee.credential = registry_cred
+                    changed = True
+                if changed:
+                    _this_ee.save()
+                    print(f"'{ee['name']}' Default Execution Environment updated.")
+
+        # Create the control plane execution environment that is used for project updates and system jobs
+        ee = settings.CONTROL_PLANE_EXECUTION_ENVIRONMENT
+        _this_ee, cp_created = ExecutionEnvironment.objects.get_or_create(
+            name="Control Plane Execution Environment", defaults={'image': ee, 'managed': True, 'credential': registry_cred}
+        )
+        if cp_created:
+            changed = True
+            print("Control Plane Execution Environment registered.")
+        else:
+            if _this_ee.image != ee:
+                _this_ee.image = ee
+                changed = True
+            if _this_ee.credential != registry_cred:
+                _this_ee.credential = registry_cred
+                changed = True
+            if changed:
+                _this_ee.save()
+
+        if changed:
+            print("(changed: True)")
+        else:
+            print("(changed: False)")
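A hypothetical invocation of the command above; this diff does not show the new file's name, so the command name used here is an assumption:

```python
from django.core.management import call_command

# 'register_default_execution_environments' is an assumed command name;
# the option dests follow the --registry-* arguments defined above.
call_command(
    'register_default_execution_environments',
    registry_url='https://registry.example.com',
    registry_username='bot',
    registry_password='s3cret',
)
```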
@@ -97,27 +97,29 @@ class Command(BaseCommand):
 
         executor = MigrationExecutor(connection)
         migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
-        registered = False
 
-        if not migrating:
-            try:
-                Instance.objects.me()
-                registered = True
-            except RuntimeError:
-                pass
-
-        if migrating or not registered:
-            # In containerized deployments, migrations happen in the task container,
-            # and the services running there don't start until migrations are
-            # finished.
-            # *This* service runs in the web container, and it's possible that it can
-            # start _before_ migrations are finished, thus causing issues with the ORM
-            # queries it makes (specifically, conf.settings queries).
-            # This block is meant to serve as a sort of bail-out for the situation
-            # where migrations aren't yet finished (similar to the migration
-            # detection middleware that the uwsgi processes have) or when instance
-            # registration isn't done yet
-            logger.error('AWX is currently installing/upgrading. Trying again in 5s...')
+        # In containerized deployments, migrations happen in the task container,
+        # and the services running there don't start until migrations are
+        # finished.
+        # *This* service runs in the web container, and it's possible that it can
+        # start _before_ migrations are finished, thus causing issues with the ORM
+        # queries it makes (specifically, conf.settings queries).
+        # This block is meant to serve as a sort of bail-out for the situation
+        # where migrations aren't yet finished (similar to the migration
+        # detection middleware that the uwsgi processes have) or when instance
+        # registration isn't done yet
+        if migrating:
+            logger.info('AWX is currently migrating, retry in 10s...')
+            time.sleep(10)
+            return
+
+        try:
+            me = Instance.objects.me()
+            logger.info('Active instance with hostname {} is registered.'.format(me.hostname))
+        except RuntimeError as e:
+            # the CLUSTER_HOST_ID of the task and web instances must match, and
+            # there must be network connectivity between the task and web instances
+            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
             time.sleep(5)
             return
 
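The bail-out above runs inside a periodically scheduled service callback, so it simply returns and lets the scheduler call back in. Restructured as a blocking loop for illustration (the function names here are illustrative, not AWX API):

```python
import logging
import time

logger = logging.getLogger(__name__)


def wait_until_ready(is_migrating, get_active_instance):
    """Illustrative loop form of the periodic bail-out pattern above."""
    while True:
        if is_migrating():
            logger.info('AWX is currently migrating, retry in 10s...')
            time.sleep(10)
            continue
        try:
            return get_active_instance()
        except RuntimeError as e:
            logger.info('Unable to return currently active instance: %s, retry in 5s...', e)
            time.sleep(5)
```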
@@ -4,16 +4,23 @@
 import sys
 import logging
 import os
 
 from django.db import models
 from django.conf import settings
 
 from awx.main.utils.filters import SmartFilter
 from awx.main.utils.pglock import advisory_lock
+from awx.main.utils.common import get_capacity_type
+from awx.main.constants import RECEPTOR_PENDING
 
-___all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager']
+___all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager', 'DeferJobCreatedManager', 'UUID_DEFAULT']
 
 logger = logging.getLogger('awx.main.managers')
+UUID_DEFAULT = '00000000-0000-0000-0000-000000000000'
+
+
+class DeferJobCreatedManager(models.Manager):
+    def get_queryset(self):
+        return super(DeferJobCreatedManager, self).get_queryset().defer('job_created')
 
 
 class HostManager(models.Manager):
@@ -28,7 +35,7 @@ class HostManager(models.Manager):
         - Only consider results that are unique
         - Return the count of this query
         """
-        return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
+        return self.order_by().exclude(inventory_sources__source='controller').values('name').distinct().count()
 
     def org_active_count(self, org_id):
         """Return count of active, unique hosts used by an organization.
@@ -40,7 +47,7 @@ class HostManager(models.Manager):
         - Only consider results that are unique
         - Return the count of this query
         """
-        return self.order_by().exclude(inventory_sources__source='tower').filter(inventory__organization=org_id).values('name').distinct().count()
+        return self.order_by().exclude(inventory_sources__source='controller').filter(inventory__organization=org_id).values('name').distinct().count()
 
     def get_queryset(self):
         """When the parent instance of the host query set has a `kind=smart` and a `host_filter`
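Reduced to plain Python, these counters tally unique host names while excluding hosts imported from another Tower/Controller (the source key simply changed from 'tower' to 'controller'):

```python
# Miniature version of active_count(); the data is illustrative.
hosts = [
    {'name': 'web1', 'source': 'ec2'},
    {'name': 'web1', 'source': 'gce'},        # same name counts once
    {'name': 'db1', 'source': 'controller'},  # excluded from the count
]
print(len({h['name'] for h in hosts if h['source'] != 'controller'}))  # 1
```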
@@ -99,18 +106,17 @@ class InstanceManager(models.Manager):
         """Return the currently active instance."""
         # If we are running unit tests, return a stub record.
         if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'):
-            return self.model(id=1, hostname='localhost', uuid='00000000-0000-0000-0000-000000000000')
+            return self.model(id=1, hostname=settings.CLUSTER_HOST_ID, uuid=UUID_DEFAULT)
 
         node = self.filter(hostname=settings.CLUSTER_HOST_ID)
         if node.exists():
             return node[0]
         raise RuntimeError("No instance found with the current cluster host id")
 
-    def register(self, uuid=None, hostname=None, ip_address=None):
-        if not uuid:
-            uuid = settings.SYSTEM_UUID
+    def register(self, uuid=None, hostname=None, ip_address=None, node_type='hybrid', defaults=None):
         if not hostname:
             hostname = settings.CLUSTER_HOST_ID
 
         with advisory_lock('instance_registration_%s' % hostname):
             if settings.AWX_AUTO_DEPROVISION_INSTANCES:
                 # detect any instances with the same IP address.
@@ -123,16 +129,44 @@ class InstanceManager(models.Manager):
                         other_inst.save(update_fields=['ip_address'])
                         logger.warning("IP address {0} conflict detected, ip address unset for host {1}.".format(ip_address, other_hostname))
 
-            instance = self.filter(hostname=hostname)
+            # Return existing instance that matches hostname or UUID (default to UUID)
+            if uuid is not None and uuid != UUID_DEFAULT and self.filter(uuid=uuid).exists():
+                instance = self.filter(uuid=uuid)
+            else:
+                # if instance was not retrieved by uuid and hostname was, use the hostname
+                instance = self.filter(hostname=hostname)
+
+            # Return existing instance
             if instance.exists():
-                instance = instance.get()
+                instance = instance.first()  # in the unusual occasion that there is more than one, only get one
+                update_fields = []
+                # if instance was retrieved by uuid and hostname has changed, update hostname
+                if instance.hostname != hostname:
+                    logger.warning("passed in hostname {0} is different from the original hostname {1}, updating to {0}".format(hostname, instance.hostname))
+                    instance.hostname = hostname
+                    update_fields.append('hostname')
+                # if any other fields are to be updated
                 if instance.ip_address != ip_address:
                     instance.ip_address = ip_address
-                    instance.save(update_fields=['ip_address'])
+                if instance.node_type != node_type:
+                    instance.node_type = node_type
+                    update_fields.append('node_type')
+                if update_fields:
+                    instance.save(update_fields=update_fields)
                     return (True, instance)
                 else:
                     return (False, instance)
-            instance = self.create(uuid=uuid, hostname=hostname, ip_address=ip_address, capacity=0)
+
+            # Create new instance, and fill in default values
+            create_defaults = dict(capacity=0)
+            if defaults is not None:
+                create_defaults.update(defaults)
+            uuid_option = {}
+            if uuid is not None:
+                uuid_option = dict(uuid=uuid)
+            if node_type == 'execution' and 'version' not in create_defaults:
+                create_defaults['version'] = RECEPTOR_PENDING
+            instance = self.create(hostname=hostname, ip_address=ip_address, node_type=node_type, **create_defaults, **uuid_option)
             return (True, instance)
 
     def get_or_register(self):
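A sketch of the updated registration API exactly as the diff defines it (requires a configured AWX environment; the values are illustrative):

```python
# register() now prefers lookup by uuid, falls back to hostname, and tracks
# which fields changed before saving.
from awx.main.models import Instance  # assumes an AWX environment

changed, instance = Instance.objects.register(
    hostname='awx-exec-1',
    node_type='execution',  # new 'execution' instances default their version to RECEPTOR_PENDING
    uuid='6f7c1ab2-0000-0000-0000-000000000001',
)
print(changed, instance.hostname, instance.node_type)
```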
@@ -140,9 +174,12 @@ class InstanceManager(models.Manager):
             from awx.main.management.commands.register_queue import RegisterQueue
 
             pod_ip = os.environ.get('MY_POD_IP')
-            registered = self.register(ip_address=pod_ip)
-            is_container_group = settings.IS_K8S
-            RegisterQueue('tower', 100, 0, [], is_container_group).register()
+            if settings.IS_K8S:
+                registered = self.register(ip_address=pod_ip, node_type='control', uuid=settings.SYSTEM_UUID)
+            else:
+                registered = self.register(ip_address=pod_ip, uuid=settings.SYSTEM_UUID)
+            RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
+            RegisterQueue(settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [], is_container_group=True).register()
             return registered
         else:
             return (False, self.me())
@@ -151,10 +188,6 @@ class InstanceManager(models.Manager):
         """Return count of active Tower nodes for licensing."""
         return self.all().count()
 
-    def my_role(self):
-        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
-        return "tower"
-
 
 class InstanceGroupManager(models.Manager):
     """A custom manager class for the Instance model.
@@ -188,6 +221,8 @@ class InstanceGroupManager(models.Manager):
         if name not in graph:
             graph[name] = {}
         graph[name]['consumed_capacity'] = 0
+        for capacity_type in ('execution', 'control'):
+            graph[name][f'consumed_{capacity_type}_capacity'] = 0
         if breakdown:
             graph[name]['committed_capacity'] = 0
             graph[name]['running_capacity'] = 0
@@ -223,6 +258,8 @@ class InstanceGroupManager(models.Manager):
                 if group_name not in graph:
                     self.zero_out_group(graph, group_name, breakdown)
                 graph[group_name]['consumed_capacity'] += impact
+                capacity_type = get_capacity_type(t)
+                graph[group_name][f'consumed_{capacity_type}_capacity'] += impact
                 if breakdown:
                     graph[group_name]['committed_capacity'] += impact
             elif t.status == 'running':
@@ -240,6 +277,8 @@ class InstanceGroupManager(models.Manager):
                 if group_name not in graph:
                     self.zero_out_group(graph, group_name, breakdown)
                 graph[group_name]['consumed_capacity'] += impact
+                capacity_type = get_capacity_type(t)
+                graph[group_name][f'consumed_{capacity_type}_capacity'] += impact
                 if breakdown:
                     graph[group_name]['running_capacity'] += impact
             else:
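The two hunks above add identical per-capacity-type accounting beside the existing total; a self-contained toy of that bookkeeping:

```python
# Toy accumulation mirroring the new consumed_{execution,control}_capacity keys.
def zero_out_group(graph, name):
    graph[name] = {'consumed_capacity': 0}
    for capacity_type in ('execution', 'control'):
        graph[name][f'consumed_{capacity_type}_capacity'] = 0

graph = {}
tasks = [('default', 'execution', 3), ('default', 'control', 1)]  # (group, type, impact)
for group_name, capacity_type, impact in tasks:
    if group_name not in graph:
        zero_out_group(graph, group_name)
    graph[group_name]['consumed_capacity'] += impact
    graph[group_name][f'consumed_{capacity_type}_capacity'] += impact

print(graph['default'])
# {'consumed_capacity': 4, 'consumed_execution_capacity': 3, 'consumed_control_capacity': 1}
```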
@@ -46,7 +46,7 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
         response['X-API-Total-Time'] = '%0.3fs' % total_time
         if settings.AWX_REQUEST_PROFILE:
             response['X-API-Profile-File'] = self.prof.stop()
-        perf_logger.info(
+        perf_logger.debug(
             f'request: {request}, response_time: {response["X-API-Total-Time"]}',
             extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"])),
         )
@@ -197,4 +197,4 @@ class MigrationRanCheckMiddleware(MiddlewareMixin):
         executor = MigrationExecutor(connection)
         plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
         if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
-            return redirect(reverse("ui_next:migrations_notran"))
+            return redirect(reverse("ui:migrations_notran"))
@@ -5,10 +5,6 @@ from __future__ import unicode_literals
 # Django
 from django.db import migrations
 
-# AWX
-from awx.main.migrations import _migration_utils as migration_utils
-from awx.main.migrations._reencrypt import blank_old_start_args
-
 
 class Migration(migrations.Migration):
 
@@ -17,6 +13,8 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.RunPython(migration_utils.set_current_apps_for_migrations, migrations.RunPython.noop),
-        migrations.RunPython(blank_old_start_args, migrations.RunPython.noop),
+        # This list is intentionally empty.
+        # Tower 3.3 included several data migrations that are no longer
+        # necessary (this list is now empty because Tower 3.3 is past EOL and
+        # cannot be directly upgraded to modern versions)
     ]
@@ -10,7 +10,7 @@ from awx.main.utils.common import set_current_apps
 
 def migrate_to_static_inputs(apps, schema_editor):
     set_current_apps(apps)
-    CredentialType.setup_tower_managed_defaults()
+    CredentialType.setup_tower_managed_defaults(apps)
 
 
 class Migration(migrations.Migration):
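Passing `apps` into `setup_tower_managed_defaults` here and in the hunks below follows the standard Django rule that data migrations should resolve models through the historical app registry rather than importing live model classes; the general pattern:

```python
# Generic data-migration pattern (illustrative): resolve models via `apps`,
# never by importing the live model classes, so the migration sees the
# schema as it existed at this point in history.
def forwards(apps, schema_editor):
    CredentialType = apps.get_model('main', 'CredentialType')  # historical model
    for ct in CredentialType.objects.filter(managed=True):
        ...  # operate on the historical model state
```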
@@ -14,7 +14,7 @@ from awx.main.utils.common import set_current_apps
 
 def setup_tower_managed_defaults(apps, schema_editor):
     set_current_apps(apps)
-    CredentialType.setup_tower_managed_defaults()
+    CredentialType.setup_tower_managed_defaults(apps)
 
 
 class Migration(migrations.Migration):
@@ -8,7 +8,7 @@ from awx.main.utils.common import set_current_apps
 
 def setup_tower_managed_defaults(apps, schema_editor):
     set_current_apps(apps)
-    CredentialType.setup_tower_managed_defaults()
+    CredentialType.setup_tower_managed_defaults(apps)
 
 
 class Migration(migrations.Migration):
@@ -9,7 +9,7 @@ from awx.main.utils.common import set_current_apps
 
 def create_new_credential_types(apps, schema_editor):
     set_current_apps(apps)
-    CredentialType.setup_tower_managed_defaults()
+    CredentialType.setup_tower_managed_defaults(apps)
 
 
 class Migration(migrations.Migration):
@@ -5,7 +5,7 @@ from awx.main.models import CredentialType
 
 
 def update_cyberark_aim_name(apps, schema_editor):
-    CredentialType.setup_tower_managed_defaults()
+    CredentialType.setup_tower_managed_defaults(apps)
     aim_types = apps.get_model('main', 'CredentialType').objects.filter(namespace='aim').order_by('id')
 
     if aim_types.count() == 2:
@@ -10,15 +10,6 @@ def migrate_event_data(apps, schema_editor):
     # that have a bigint primary key (because the old usage of an integer
     # numeric isn't enough, as its range is about 2.1B, see:
     # https://www.postgresql.org/docs/9.1/datatype-numeric.html)
-
-    # unfortunately, we can't do this with a simple ALTER TABLE, because
-    # for tables with hundreds of millions or billions of rows, the ALTER TABLE
-    # can take *hours* on modest hardware.
-    #
-    # the approach in this migration means that post-migration, event data will
-    # *not* immediately show up, but will be repopulated over time progressively
-    # the trade-off here is not having to wait hours for the full data migration
-    # before you can start and run AWX again (including new playbook runs)
     for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
         with connection.cursor() as cursor:
             # rename the current event table
@@ -35,30 +26,7 @@ def migrate_event_data(apps, schema_editor):
             cursor.execute(f'CREATE SEQUENCE "{tblname}_id_seq";')
             cursor.execute(f'ALTER TABLE "{tblname}" ALTER COLUMN "id" ' f"SET DEFAULT nextval('{tblname}_id_seq');")
             cursor.execute(f"SELECT setval('{tblname}_id_seq', (SELECT MAX(id) FROM _old_{tblname}), true);")
+            cursor.execute(f'DROP TABLE _old_{tblname};')
-
-            # replace the BTREE index on main_jobevent.job_id with
-            # a BRIN index to drastically improve per-UJ lookup performance
-            # see: https://info.crunchydata.com/blog/postgresql-brin-indexes-big-data-performance-with-minimal-storage
-            if tblname == 'main_jobevent':
-                cursor.execute("SELECT indexname FROM pg_indexes WHERE tablename='main_jobevent' AND indexdef LIKE '%USING btree (job_id)';")
-                old_index = cursor.fetchone()[0]
-                cursor.execute(f'DROP INDEX {old_index}')
-                cursor.execute('CREATE INDEX main_jobevent_job_id_brin_idx ON main_jobevent USING brin (job_id);')
-
-            # remove all of the indexes and constraints from the old table
-            # (they just slow down the data migration)
-            cursor.execute(f"SELECT indexname, indexdef FROM pg_indexes WHERE tablename='_old_{tblname}' AND indexname != '{tblname}_pkey';")
-            indexes = cursor.fetchall()
-
-            cursor.execute(
-                f"SELECT conname, contype, pg_catalog.pg_get_constraintdef(r.oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = '_old_{tblname}'::regclass AND conname != '{tblname}_pkey';"
-            )
-            constraints = cursor.fetchall()
-
-            for indexname, indexdef in indexes:
-                cursor.execute(f'DROP INDEX IF EXISTS {indexname}')
-            for conname, contype, condef in constraints:
-                cursor.execute(f'ALTER TABLE _old_{tblname} DROP CONSTRAINT IF EXISTS {conname}')
 
 
 class FakeAlterField(migrations.AlterField):
|||||||
@@ -6,7 +6,7 @@ from awx.main.utils.common import set_current_apps
|
|||||||
|
|
||||||
def setup_tower_managed_defaults(apps, schema_editor):
|
def setup_tower_managed_defaults(apps, schema_editor):
|
||||||
set_current_apps(apps)
|
set_current_apps(apps)
|
||||||
CredentialType.setup_tower_managed_defaults()
|
CredentialType.setup_tower_managed_defaults(apps)
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,15 +1,30 @@
 # Generated by Django 2.2.16 on 2021-04-21 15:02
 
-from django.db import migrations, models
+from django.db import migrations, models, transaction
+
+
+def remove_iso_instances(apps, schema_editor):
+    Instance = apps.get_model('main', 'Instance')
+    with transaction.atomic():
+        Instance.objects.filter(rampart_groups__controller__isnull=False).delete()
+
+
+def remove_iso_groups(apps, schema_editor):
+    InstanceGroup = apps.get_model('main', 'InstanceGroup')
+    with transaction.atomic():
+        InstanceGroup.objects.filter(controller__isnull=False).delete()
 
 
 class Migration(migrations.Migration):
+    atomic = False
 
     dependencies = [
         ('main', '0138_custom_inventory_scripts_removal'),
     ]
 
     operations = [
+        migrations.RunPython(remove_iso_instances),
+        migrations.RunPython(remove_iso_groups),
         migrations.RemoveField(
             model_name='instance',
             name='last_isolated_check',
awx/main/migrations/0144_event_partitions.py (new file, 268 lines)
@@ -0,0 +1,268 @@
|
from django.db import migrations, models, connection
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_event_data(apps, schema_editor):
|
||||||
|
# see: https://github.com/ansible/awx/issues/9039
|
||||||
|
#
|
||||||
|
# the goal of this function is -- for each job event table -- to:
|
||||||
|
# - create a parent partition table
|
||||||
|
# - .. with a single partition
|
||||||
|
# - .. that includes all existing job events
|
||||||
|
#
|
||||||
|
# the new main_jobevent_parent table should have a new
|
||||||
|
# denormalized column, job_created, this is used as a
|
||||||
|
# basis for partitioning job event rows
|
||||||
|
#
|
||||||
|
# The initial partion will be a unique case. After
|
||||||
|
# the migration is completed, awx should create
|
||||||
|
# new partitions on an hourly basis, as needed.
|
||||||
|
# All events for a given job should be placed in
|
||||||
|
# a partition based on the job's _created time_.
|
||||||
|
|
||||||
|
for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
# mark existing table as _unpartitioned_*
|
||||||
|
# we will drop this table after its data
|
||||||
|
# has been moved over
|
||||||
|
cursor.execute(f'ALTER TABLE {tblname} RENAME TO _unpartitioned_{tblname}')
|
||||||
|
|
||||||
|
# create a copy of the table that we will use as a reference for schema
|
||||||
|
# otherwise, the schema changes we would make on the old jobevents table
|
||||||
|
# (namely, dropping the primary key constraint) would cause the migration
|
||||||
|
# to suffer a serious performance degradation
|
||||||
|
cursor.execute(f'CREATE TABLE tmp_{tblname} ' f'(LIKE _unpartitioned_{tblname} INCLUDING ALL)')
|
||||||
|
|
||||||
|
# drop primary key constraint; in a partioned table
|
||||||
|
# constraints must include the partition key itself
|
||||||
|
# TODO: do more generic search for pkey constraints
|
||||||
|
# instead of hardcoding this one that applies to main_jobevent
|
||||||
|
cursor.execute(f'ALTER TABLE tmp_{tblname} DROP CONSTRAINT tmp_{tblname}_pkey')
|
||||||
|
|
||||||
|
# create parent table
|
||||||
|
cursor.execute(
|
||||||
|
f'CREATE TABLE {tblname} '
|
||||||
|
f'(LIKE tmp_{tblname} INCLUDING ALL, job_created TIMESTAMP WITH TIME ZONE NOT NULL) '
|
||||||
|
f'PARTITION BY RANGE(job_created);'
|
||||||
|
)
|
||||||
|
|
||||||
|
cursor.execute(f'DROP TABLE tmp_{tblname}')
|
||||||
|
|
||||||
|
# recreate primary key constraint
|
||||||
|
cursor.execute(f'ALTER TABLE ONLY {tblname} ' f'ADD CONSTRAINT {tblname}_pkey_new PRIMARY KEY (id, job_created);')
|
||||||
|
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
"""
|
||||||
|
Big int migration introduced the brin index main_jobevent_job_id_brin_idx index. For upgardes, we drop the index, new installs do nothing.
|
||||||
|
I have seen the second index in my dev environment. I can not find where in the code it was created. Drop it just in case
|
||||||
|
"""
|
||||||
|
cursor.execute('DROP INDEX IF EXISTS main_jobevent_job_id_brin_idx')
|
||||||
|
cursor.execute('DROP INDEX IF EXISTS main_jobevent_job_id_idx')
|
||||||
|
|
||||||
|
|
||||||
|
class FakeAddField(migrations.AddField):
|
||||||
|
def database_forwards(self, *args):
|
||||||
|
# this is intentionally left blank, because we're
|
||||||
|
# going to accomplish the migration with some custom raw SQL
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0143_hostmetric'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(migrate_event_data),
|
||||||
|
        FakeAddField(
            model_name='jobevent',
            name='job_created',
            field=models.DateTimeField(null=True, editable=False),
        ),
        FakeAddField(
            model_name='inventoryupdateevent',
            name='job_created',
            field=models.DateTimeField(null=True, editable=False),
        ),
        FakeAddField(
            model_name='projectupdateevent',
            name='job_created',
            field=models.DateTimeField(null=True, editable=False),
        ),
        FakeAddField(
            model_name='adhoccommandevent',
            name='job_created',
            field=models.DateTimeField(null=True, editable=False),
        ),
        FakeAddField(
            model_name='systemjobevent',
            name='job_created',
            field=models.DateTimeField(null=True, editable=False),
        ),
        migrations.CreateModel(
            name='UnpartitionedAdHocCommandEvent',
            fields=[],
            options={
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('main.adhoccommandevent',),
        ),
        migrations.CreateModel(
            name='UnpartitionedInventoryUpdateEvent',
            fields=[],
            options={
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('main.inventoryupdateevent',),
        ),
        migrations.CreateModel(
            name='UnpartitionedJobEvent',
            fields=[],
            options={
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('main.jobevent',),
        ),
        migrations.CreateModel(
            name='UnpartitionedProjectUpdateEvent',
            fields=[],
            options={
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('main.projectupdateevent',),
        ),
        migrations.CreateModel(
            name='UnpartitionedSystemJobEvent',
            fields=[],
            options={
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('main.systemjobevent',),
        ),
        migrations.AlterField(
            model_name='adhoccommandevent',
            name='ad_hoc_command',
            field=models.ForeignKey(
                db_index=False, editable=False, on_delete=models.deletion.DO_NOTHING, related_name='ad_hoc_command_events', to='main.AdHocCommand'
            ),
        ),
        migrations.AlterField(
            model_name='adhoccommandevent',
            name='created',
            field=models.DateTimeField(default=None, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='adhoccommandevent',
            name='modified',
            field=models.DateTimeField(db_index=True, default=None, editable=False),
        ),
        migrations.AlterField(
            model_name='inventoryupdateevent',
            name='created',
            field=models.DateTimeField(default=None, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='inventoryupdateevent',
            name='inventory_update',
            field=models.ForeignKey(
                db_index=False, editable=False, on_delete=models.deletion.DO_NOTHING, related_name='inventory_update_events', to='main.InventoryUpdate'
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdateevent',
            name='modified',
            field=models.DateTimeField(db_index=True, default=None, editable=False),
        ),
        migrations.AlterField(
            model_name='jobevent',
            name='created',
            field=models.DateTimeField(default=None, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='jobevent',
            name='job',
            field=models.ForeignKey(db_index=False, editable=False, null=True, on_delete=models.deletion.DO_NOTHING, related_name='job_events', to='main.Job'),
        ),
        migrations.AlterField(
            model_name='jobevent',
            name='modified',
            field=models.DateTimeField(db_index=True, default=None, editable=False),
        ),
        migrations.AlterField(
            model_name='projectupdateevent',
            name='created',
            field=models.DateTimeField(default=None, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='projectupdateevent',
            name='modified',
            field=models.DateTimeField(db_index=True, default=None, editable=False),
        ),
        migrations.AlterField(
            model_name='projectupdateevent',
            name='project_update',
            field=models.ForeignKey(
                db_index=False, editable=False, on_delete=models.deletion.DO_NOTHING, related_name='project_update_events', to='main.ProjectUpdate'
            ),
        ),
        migrations.AlterField(
            model_name='systemjobevent',
            name='created',
            field=models.DateTimeField(default=None, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='systemjobevent',
            name='modified',
            field=models.DateTimeField(db_index=True, default=None, editable=False),
        ),
        migrations.AlterField(
            model_name='systemjobevent',
            name='system_job',
            field=models.ForeignKey(
                db_index=False, editable=False, on_delete=models.deletion.DO_NOTHING, related_name='system_job_events', to='main.SystemJob'
            ),
        ),
        migrations.AlterIndexTogether(
            name='adhoccommandevent',
            index_together={
                ('ad_hoc_command', 'job_created', 'event'),
                ('ad_hoc_command', 'job_created', 'counter'),
                ('ad_hoc_command', 'job_created', 'uuid'),
            },
        ),
        migrations.AlterIndexTogether(
            name='inventoryupdateevent',
            index_together={('inventory_update', 'job_created', 'counter'), ('inventory_update', 'job_created', 'uuid')},
        ),
        migrations.AlterIndexTogether(
            name='jobevent',
            index_together={
                ('job', 'job_created', 'counter'),
                ('job', 'job_created', 'uuid'),
                ('job', 'job_created', 'event'),
                ('job', 'job_created', 'parent_uuid'),
            },
        ),
        migrations.AlterIndexTogether(
            name='projectupdateevent',
            index_together={
                ('project_update', 'job_created', 'uuid'),
                ('project_update', 'job_created', 'event'),
                ('project_update', 'job_created', 'counter'),
            },
        ),
        migrations.AlterIndexTogether(
            name='systemjobevent',
            index_together={('system_job', 'job_created', 'uuid'), ('system_job', 'job_created', 'counter')},
        ),
    ]

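Because the new primary key is the composite (id, job_created), queries that include the partition key let PostgreSQL prune partitions. A minimal, hypothetical ORM sketch (the `job` variable is an assumption, not part of this diff):

    # job_created mirrors the parent job's created time, so filtering on it
    # restricts the scan to a single hourly partition
    events = JobEvent.objects.filter(job=job, job_created=job.created)
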
21 awx/main/migrations/0145_deregister_managed_ee_objs.py Normal file
@@ -0,0 +1,21 @@
# Generated by Django 2.2.16 on 2021-06-07 19:36

from django.db import migrations


def forwards(apps, schema_editor):
    ExecutionEnvironment = apps.get_model('main', 'ExecutionEnvironment')
    for row in ExecutionEnvironment.objects.filter(managed_by_tower=True):
        row.managed_by_tower = False
        row.save(update_fields=['managed_by_tower'])


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0144_event_partitions'),
    ]

    operations = [
        migrations.RunPython(forwards),
    ]

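Since historical models retrieved via apps.get_model() carry no custom save() logic, the per-row loop above could arguably be collapsed into a single bulk UPDATE; a hedged sketch (assuming no per-row side effects are needed):

    ExecutionEnvironment.objects.filter(managed_by_tower=True).update(managed_by_tower=False)
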
59 awx/main/migrations/0146_add_insights_inventory.py Normal file
@@ -0,0 +1,59 @@
# Generated by Django 2.2.16 on 2021-06-08 18:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0145_deregister_managed_ee_objs'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='host',
            name='insights_system_id',
        ),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('tower', 'Ansible Tower'),
                    ('insights', 'Red Hat Insights'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('tower', 'Ansible Tower'),
                    ('insights', 'Red Hat Insights'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]

24 awx/main/migrations/0147_validate_ee_image_field.py Normal file
@@ -0,0 +1,24 @@
# Generated by Django 2.2.16 on 2021-06-15 02:49

import awx.main.validators
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0146_add_insights_inventory'),
    ]

    operations = [
        migrations.AlterField(
            model_name='executionenvironment',
            name='image',
            field=models.CharField(
                help_text='The full image location, including the container registry, image name, and version tag.',
                max_length=1024,
                validators=[awx.main.validators.validate_container_image_name],
                verbose_name='image location',
            ),
        ),
    ]

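For context, a Django field validator is just a callable that raises ValidationError; the sketch below is a hypothetical stand-in, not AWX's actual validate_container_image_name implementation:

    from django.core.exceptions import ValidationError

    def validate_container_image_name(value):
        # hypothetical check: reject empty values and embedded whitespace
        if not value or any(c.isspace() for c in value):
            raise ValidationError('%s is not a valid container image name' % value)
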
20 awx/main/migrations/0148_unifiedjob_receptor_unit_id.py Normal file
@@ -0,0 +1,20 @@
# Generated by Django 2.2.16 on 2021-06-11 04:50

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0147_validate_ee_image_field'),
    ]

    operations = [
        migrations.AddField(
            model_name='unifiedjob',
            name='work_unit_id',
            field=models.CharField(
                blank=True, default=None, editable=False, help_text='The Receptor work unit ID associated with this job.', max_length=255, null=True
            ),
        ),
    ]

17 awx/main/migrations/0149_remove_inventory_insights_credential.py Normal file
@@ -0,0 +1,17 @@
# Generated by Django 2.2.16 on 2021-06-16 21:00

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0148_unifiedjob_receptor_unit_id'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='inventory',
            name='insights_credential',
        ),
    ]

113 awx/main/migrations/0150_rename_inv_sources_inv_updates.py Normal file
@@ -0,0 +1,113 @@
# Generated by Django 2.2.16 on 2021-06-17 13:12
import logging

from django.db import migrations, models

from awx.main.models.credential import ManagedCredentialType, CredentialType as ModernCredentialType


logger = logging.getLogger(__name__)


def forwards(apps, schema_editor):
    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
    InventorySource = apps.get_model('main', 'InventorySource')

    r = InventoryUpdate.objects.filter(source='tower').update(source='controller')
    if r:
        logger.warning(f'Renamed {r} tower inventory updates to controller')
    r = InventorySource.objects.filter(source='tower').update(source='controller')
    if r:
        logger.warning(f'Renamed {r} tower inventory sources to controller')

    CredentialType = apps.get_model('main', 'CredentialType')

    tower_type = CredentialType.objects.filter(managed_by_tower=True, namespace='tower').first()
    if tower_type is not None:
        controller_type = CredentialType.objects.filter(managed_by_tower=True, namespace='controller', kind='cloud').first()
        if controller_type:
            # this gets created by prior migrations in upgrade scenarios
            controller_type.delete()

        registry_type = ManagedCredentialType.registry.get('controller')
        if not registry_type:
            raise RuntimeError('Expected to find the controller credential type; this may need to be edited in the future!')
        logger.warning('Renaming the Ansible Tower credential type for existing install')
        tower_type.name = registry_type.name  # sensitive to translations
        tower_type.namespace = 'controller'  # if not done, setup_tower_managed_defaults will error
        tower_type.save(update_fields=['name', 'namespace'])

    ModernCredentialType.setup_tower_managed_defaults(apps)


def backwards(apps, schema_editor):
    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
    InventorySource = apps.get_model('main', 'InventorySource')

    r = InventoryUpdate.objects.filter(source='controller').update(source='tower')
    if r:
        logger.warning(f'Renamed {r} controller inventory updates to tower')
    r = InventorySource.objects.filter(source='controller').update(source='tower')
    if r:
        logger.warning(f'Renamed {r} controller inventory sources to tower')

    CredentialType = apps.get_model('main', 'CredentialType')

    tower_type = CredentialType.objects.filter(managed_by_tower=True, namespace='controller', kind='cloud').first()
    if tower_type is not None and not CredentialType.objects.filter(managed_by_tower=True, namespace='tower').exists():
        logger.info('Renaming the controller credential type back')
        tower_type.namespace = 'tower'
        tower_type.name = 'Ansible Tower'
        tower_type.save(update_fields=['namespace', 'name'])


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0149_remove_inventory_insights_credential'),
    ]

    operations = [
        migrations.RunPython(migrations.RunPython.noop, backwards),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]

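Note how the operations list brackets the schema changes with two RunPython calls, so the data migration always runs after the schema state it depends on, in both directions; a restatement of that ordering logic as a sketch:

    operations = [
        # the reverse pass runs this last, after the 'controller' choice reverts
        migrations.RunPython(migrations.RunPython.noop, backwards),
        # ... AlterField operations introducing the 'controller' choice ...
        # the forward pass runs this last, after the new choice exists
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]
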
28 awx/main/migrations/0151_rename_managed_by_tower.py Normal file
@@ -0,0 +1,28 @@
# Generated by Django 2.2.16 on 2021-06-17 18:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0150_rename_inv_sources_inv_updates'),
    ]

    operations = [
        migrations.RenameField(
            model_name='credential',
            old_name='managed_by_tower',
            new_name='managed',
        ),
        migrations.RenameField(
            model_name='credentialtype',
            old_name='managed_by_tower',
            new_name='managed',
        ),
        migrations.RenameField(
            model_name='executionenvironment',
            old_name='managed_by_tower',
            new_name='managed',
        ),
    ]

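On PostgreSQL each RenameField above reduces to a metadata-only column rename; a hedged sketch of the equivalent SQL (table name assumed from Django's app_model naming convention):

    ALTER TABLE main_credential RENAME COLUMN managed_by_tower TO managed;
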
22 awx/main/migrations/0152_instance_node_type.py Normal file
@@ -0,0 +1,22 @@
# Generated by Django 2.2.20 on 2021-07-26 19:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0151_rename_managed_by_tower'),
    ]

    operations = [
        migrations.AddField(
            model_name='instance',
            name='node_type',
            field=models.CharField(
                choices=[('control', 'Control plane node'), ('execution', 'Execution plane node'), ('hybrid', 'Controller and execution')],
                default='hybrid',
                max_length=16,
            ),
        ),
    ]

27 awx/main/migrations/0153_instance_last_seen.py Normal file
@@ -0,0 +1,27 @@
# Generated by Django 2.2.20 on 2021-08-12 13:55

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0152_instance_node_type'),
    ]

    operations = [
        migrations.AddField(
            model_name='instance',
            name='last_seen',
            field=models.DateTimeField(
                editable=False,
                help_text='Last time instance ran its heartbeat task for main cluster nodes. Last known connection to receptor mesh for execution nodes.',
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name='instance',
            name='memory',
            field=models.BigIntegerField(default=0, editable=False, help_text='Total system memory of this instance in bytes.'),
        ),
    ]

18 awx/main/migrations/0154_set_default_uuid.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 2.2.20 on 2021-09-01 22:53

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0153_instance_last_seen'),
    ]

    operations = [
        migrations.AlterField(
            model_name='instance',
            name='uuid',
            field=models.CharField(default='00000000-0000-0000-0000-000000000000', max_length=40),
        ),
    ]

25 awx/main/migrations/0155_improved_health_check.py Normal file
@@ -0,0 +1,25 @@
# Generated by Django 2.2.20 on 2021-08-31 17:41

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0154_set_default_uuid'),
    ]

    operations = [
        migrations.AddField(
            model_name='instance',
            name='errors',
            field=models.TextField(blank=True, default='', editable=False, help_text='Any error details from the last health check.'),
        ),
        migrations.AddField(
            model_name='instance',
            name='last_health_check',
            field=models.DateTimeField(
                editable=False, help_text='Last time a health check was run on this instance to refresh cpu, memory, and capacity.', null=True
            ),
        ),
    ]

@@ -19,7 +19,7 @@ def migrate_galaxy_settings(apps, schema_editor):
         # nothing to migrate
         return
     set_current_apps(apps)
-    ModernCredentialType.setup_tower_managed_defaults()
+    ModernCredentialType.setup_tower_managed_defaults(apps)
     CredentialType = apps.get_model('main', 'CredentialType')
     Credential = apps.get_model('main', 'Credential')
     Setting = apps.get_model('conf', 'Setting')
@@ -34,10 +34,21 @@ def migrate_galaxy_settings(apps, schema_editor):
     if public_galaxy_setting and public_galaxy_setting.value is False:
         # ...UNLESS this behavior was explicitly disabled via this setting
         public_galaxy_enabled = False
-    public_galaxy_credential = Credential(
-        created=now(), modified=now(), name='Ansible Galaxy', managed_by_tower=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
-    )
+    try:
+        # Needed for old migrations
+        public_galaxy_credential = Credential(
+            created=now(),
+            modified=now(),
+            name='Ansible Galaxy',
+            managed_by_tower=True,
+            credential_type=galaxy_type,
+            inputs={'url': 'https://galaxy.ansible.com/'},
+        )
+    except:
+        # Needed for new migrations, tests
+        public_galaxy_credential = Credential(
+            created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
+        )
     public_galaxy_credential.save()

     for org in Organization.objects.all():
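The try/except in the hunk above exists because this data migration can run against two historical schemas: before migration 0151 the Credential field is named managed_by_tower, afterwards managed, and passing an unknown keyword to a Django model constructor raises TypeError. A hedged sketch of a narrower guard than the bare except (argument list abbreviated for illustration):

    try:
        cred = Credential(name='Ansible Galaxy', managed_by_tower=True)
    except TypeError:
        # the renamed keyword is the only failure this fallback should cover
        cred = Credential(name='Ansible Galaxy', managed=True)
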
@@ -1,26 +0,0 @@
-import logging
-
-from awx.conf.migrations._reencrypt import (
-    decrypt_field,
-)
-
-logger = logging.getLogger('awx.main.migrations')
-
-__all__ = []
-
-
-def blank_old_start_args(apps, schema_editor):
-    UnifiedJob = apps.get_model('main', 'UnifiedJob')
-    for uj in UnifiedJob.objects.defer('result_stdout_text').exclude(start_args='').iterator():
-        if uj.status in ['running', 'pending', 'new', 'waiting']:
-            continue
-        try:
-            args_dict = decrypt_field(uj, 'start_args')
-        except ValueError:
-            args_dict = None
-        if args_dict == {}:
-            continue
-        if uj.start_args:
-            logger.debug('Blanking job args for %s', uj.pk)
-        uj.start_args = ''
-        uj.save()
@@ -3,7 +3,6 @@

 # Django
 from django.conf import settings  # noqa
-from django.db import connection
 from django.db.models.signals import pre_delete  # noqa

 # AWX
@@ -36,6 +35,11 @@ from awx.main.models.events import (  # noqa
     JobEvent,
     ProjectUpdateEvent,
     SystemJobEvent,
+    UnpartitionedAdHocCommandEvent,
+    UnpartitionedInventoryUpdateEvent,
+    UnpartitionedJobEvent,
+    UnpartitionedProjectUpdateEvent,
+    UnpartitionedSystemJobEvent,
 )
 from awx.main.models.ad_hoc_commands import AdHocCommand  # noqa
 from awx.main.models.schedules import Schedule  # noqa
@@ -92,27 +96,6 @@ User.add_to_class('can_access_with_errors', check_user_access_with_errors)
 User.add_to_class('accessible_objects', user_accessible_objects)


-def enforce_bigint_pk_migration():
-    #
-    # NOTE: this function is not actually in use anymore,
-    # but has been intentionally kept for historical purposes,
-    # and to serve as an illustration if we ever need to perform
-    # bulk modification/migration of event data in the future.
-    #
-    # see: https://github.com/ansible/awx/issues/6010
-    # look at all the event tables and verify that they have been fully migrated
-    # from the *old* int primary key table to the replacement bigint table
-    # if not, attempt to migrate them in the background
-    #
-    for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
-        with connection.cursor() as cursor:
-            cursor.execute('SELECT 1 FROM information_schema.tables WHERE table_name=%s', (f'_old_{tblname}',))
-            if bool(cursor.rowcount):
-                from awx.main.tasks import migrate_legacy_event_data
-
-                migrate_legacy_event_data.apply_async([tblname])
-
-
 def cleanup_created_modified_by(sender, **kwargs):
     # work around a bug in django-polymorphic that doesn't properly
     # handle cascades for reverse foreign keys on the polymorphic base model
@@ -15,7 +15,7 @@ from django.core.exceptions import ValidationError
 # AWX
 from awx.api.versioning import reverse
 from awx.main.models.base import prevent_search, AD_HOC_JOB_TYPE_CHOICES, VERBOSITY_CHOICES, VarsDictProperty
-from awx.main.models.events import AdHocCommandEvent
+from awx.main.models.events import AdHocCommandEvent, UnpartitionedAdHocCommandEvent
 from awx.main.models.unified_jobs import UnifiedJob
 from awx.main.models.notifications import JobNotificationMixin, NotificationTemplate
@@ -127,6 +127,8 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):

     @property
     def event_class(self):
+        if self.has_unpartitioned_events:
+            return UnpartitionedAdHocCommandEvent
         return AdHocCommandEvent

     @property
@@ -150,10 +152,6 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
     def is_container_group_task(self):
         return bool(self.instance_group and self.instance_group.is_container_group)

-    @property
-    def can_run_containerized(self):
-        return True
-
     def get_absolute_url(self, request=None):
         return reverse('api:ad_hoc_command_detail', kwargs={'pk': self.pk}, request=request)
@@ -215,9 +213,6 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
         self.name = Truncator(u': '.join(filter(None, (self.module_name, self.module_args)))).chars(512)
         if 'name' not in update_fields:
             update_fields.append('name')
-        if not self.execution_environment_id:
-            self.execution_environment = self.resolve_execution_environment()
-            update_fields.append('execution_environment')
         super(AdHocCommand, self).save(*args, **kwargs)

     @property
Some files were not shown because too many files have changed in this diff.