Mirror of https://github.com/ansible/awx.git, synced 2026-02-07 12:34:43 -03:30
Compare commits
934 Commits, from aa4ca300f5 (newest) through d49a61b63e (oldest); the Author, Date, and message columns of the commit table are empty in this mirror.
@@ -1,2 +1 @@
.git
-awx/ui/node_modules
.gitignore (vendored, 6 changes)
@@ -33,9 +33,8 @@ awx/ui_next/src/locales/
awx/ui_next/coverage/
awx/ui_next/build
awx/ui_next/.env.local
awx/ui_next/instrumented
rsyslog.pid
/tower-license
/tower-license/**
tools/prometheus/data
-tools/docker-compose/Dockerfile
@@ -147,3 +146,6 @@ use_dev_supervisor.txt
.idea/*
*.unison.tmp
*.#
+/tools/docker-compose/overrides/
+/awx/ui_next/.ui-built
+/Dockerfile
CHANGELOG.md (71 changes)
@@ -2,22 +2,83 @@

This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.

## 17.0.1 (January 26, 2021)
- Fixed pgdocker directory permissions issue with Local Docker installer: https://github.com/ansible/awx/pull/9152
- Fixed a bug in the UI which caused toggle settings to not be changed when clicked: https://github.com/ansible/awx/pull/9093

## 17.0.0 (January 22, 2021)
- AWX now requires PostgreSQL 12 by default: https://github.com/ansible/awx/pull/8943
  **Note:** users who encounter permissions errors at upgrade time should `chown -R ~/.awx/pgdocker` to ensure it's owned by the user running the install playbook
- Added support for region name for OpenStack inventory: https://github.com/ansible/awx/issues/5080
- Added the ability to chain undefined attributes in custom notification templates: https://github.com/ansible/awx/issues/8677
- Dramatically simplified the `image_build` role: https://github.com/ansible/awx/pull/8980
- Fixed a bug which can cause schema migrations to fail at install time: https://github.com/ansible/awx/issues/9077
- Fixed a bug which caused the `is_superuser` user property to be out of date in certain circumstances: https://github.com/ansible/awx/pull/8833
- Fixed a bug which sometimes results in race conditions on setting access: https://github.com/ansible/awx/pull/8580
- Fixed a bug which sometimes causes an unexpected delay in stdout for some playbooks: https://github.com/ansible/awx/issues/9085
- (UI) Added support for credential password prompting on job launch: https://github.com/ansible/awx/pull/9028
- (UI) Added the ability to configure LDAP settings in the UI: https://github.com/ansible/awx/issues/8291
- (UI) Added a sync button to the Project detail view: https://github.com/ansible/awx/issues/8847
- (UI) Added a form for configuring Google OAuth 2.0 settings: https://github.com/ansible/awx/pull/8762
- (UI) Added searchable keys and related keys to the Credentials list: https://github.com/ansible/awx/issues/8603
- (UI) Added support for advanced search and copying to Notification Templates: https://github.com/ansible/awx/issues/7879
- (UI) Added support for prompting on workflow nodes: https://github.com/ansible/awx/issues/5913
- (UI) Added support for session timeouts: https://github.com/ansible/awx/pull/8250
- (UI) Fixed a bug that broke websocket streaming for the insecure ws:// protocol: https://github.com/ansible/awx/pull/8877
- (UI) Fixed a bug in the user interface when a translation for the browser's preferred locale isn't available: https://github.com/ansible/awx/issues/8884
- (UI) Fixed a bug where navigating from one survey question form directly to another wasn't reloading the form: https://github.com/ansible/awx/issues/7522
- (UI) Fixed a bug which can cause an uncaught error while launching a Job Template: https://github.com/ansible/awx/issues/8936
- Updated autobahn to address CVE-2020-35678

## 16.0.0 (December 10, 2020)
- AWX now ships with a reimagined user interface. **Please read this before upgrading:** https://groups.google.com/g/awx-project/c/KuT5Ao92HWo
- Removed support for syncing inventory from Red Hat CloudForms - https://github.com/ansible/awx/commit/0b701b3b2
- Removed support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
- Upgraded NodeJS to actively maintained LTS 14.15.1 - https://github.com/ansible/awx/pull/8766
- Added Git-LFS to the default image build - https://github.com/ansible/awx/pull/8700
- Added the ability to specify `metadata.labels` in the podspec for container groups - https://github.com/ansible/awx/issues/8486
- Added support for Kubernetes pod annotations - https://github.com/ansible/awx/pull/8434
- Added the ability to label the web container in local Docker installs - https://github.com/ansible/awx/pull/8449
- Added additional metadata (as an extra var) to playbook runs to report the SCM branch name - https://github.com/ansible/awx/pull/8433
- Fixed a bug that caused k8s installations to fail due to an incorrect Helm repo - https://github.com/ansible/awx/issues/8715
- Fixed a bug that prevented certain Workflow Approval resources from being deleted - https://github.com/ansible/awx/pull/8612
- Fixed a bug that prevented the deletion of inventories stuck in "pending deletion" state - https://github.com/ansible/awx/issues/8525
- Fixed a display bug in webhook notifications with certain unicode characters - https://github.com/ansible/awx/issues/7400
- Improved support for exporting dependent objects (Inventory Hosts and Groups) in the `awx export` CLI tool - https://github.com/ansible/awx/commit/607bc0788

## 15.0.1 (October 20, 2020)
- Added several optimizations to improve performance for a variety of high-load simultaneous job launch use cases - https://github.com/ansible/awx/pull/8403
- Added the ability to source roles and collections from requirements.yaml files (not just requirements.yml) - https://github.com/ansible/awx/issues/4540
- awx.awx collection modules now provide a clearer error message for incompatible versions of awxkit - https://github.com/ansible/awx/issues/8127
- Fixed a bug in notification messages that contain certain unicode characters - https://github.com/ansible/awx/issues/7400
- Fixed a bug that prevents the deletion of Workflow Approval records - https://github.com/ansible/awx/issues/8305
- Fixed a bug that broke the selection of webhook credentials - https://github.com/ansible/awx/issues/7892
- Fixed a bug which can cause confusing behavior for social auth logins across distinct browser tabs - https://github.com/ansible/awx/issues/8154
- Fixed several bugs in the output of Workflow Job Templates using the `awx export` tool - https://github.com/ansible/awx/issues/7798 https://github.com/ansible/awx/pull/7847
- Fixed a race condition that can lead to missing hosts when running parallel inventory syncs - https://github.com/ansible/awx/issues/5571
- Fixed an HTTP 500 error when certain LDAP group parameters aren't properly set - https://github.com/ansible/awx/issues/7622
- Updated a few dependencies in response to several CVEs:
  * CVE-2020-7720
  * CVE-2020-7743
  * CVE-2020-7676

## 15.0.0 (September 30, 2020)
- Added improved support for fetching Ansible collections from private Galaxy content sources (such as https://github.com/ansible/galaxy_ng) - https://github.com/ansible/awx/issues/7813
  **Note:** as part of this change, new Organizations created in the AWX API will _no longer_ automatically synchronize roles and collections from galaxy.ansible.com by default. More details on this change can be found at: https://github.com/ansible/awx/issues/8341#issuecomment-707310633
- AWX now utilizes a version of certifi that auto-discovers certificates in the system certificate store - https://github.com/ansible/awx/pull/8242
- Added support for arbitrary custom inventory plugin configuration: https://github.com/ansible/awx/issues/5150
- Added an optional setting to disable the auto-creation of organizations and teams on successful SAML login. - https://github.com/ansible/awx/pull/8069
-- Added a number of optimizations to Ansible Tower's callback receiver to improve the speed of stdout processing for simultaneous playbook runs - https://github.com/ansible/awx/pull/8193 https://github.com/ansible/awx/pull/8191
+- Added a number of optimizations to AWX's callback receiver to improve the speed of stdout processing for simultaneous playbook runs - https://github.com/ansible/awx/pull/8193 https://github.com/ansible/awx/pull/8191
- Added the ability to use `!include` and `!import` constructors when constructing YAML for use with the AWX CLI - https://github.com/ansible/awx/issues/8135
- Fixed a bug that prevented certain users from being able to edit approval nodes in Workflows - https://github.com/ansible/awx/pull/8253
- Fixed a bug that broke password prompting for credentials in certain cases - https://github.com/ansible/awx/issues/8202
- Fixed a bug which can cause PostgreSQL deadlocks when running many parallel playbooks against large shared inventories - https://github.com/ansible/awx/issues/8145
-- Fixed a bug which can cause delays in Ansible Tower's task manager when large numbers of simultaneous jobs are scheduled - https://github.com/ansible/awx/issues/7655
+- Fixed a bug which can cause delays in AWX's task manager when large numbers of simultaneous jobs are scheduled - https://github.com/ansible/awx/issues/7655
- Fixed a bug which can cause certain scheduled jobs - those that run every X minute(s) or hour(s) - to fail to run at the proper time - https://github.com/ansible/awx/issues/8071
- Fixed a performance issue for playbooks that store large amounts of data using the `set_stats` module - https://github.com/ansible/awx/issues/8006
- Fixed a bug related to AWX's handling of the auth_path argument for the HashiVault KeyValue credential plugin - https://github.com/ansible/awx/pull/7991
- Fixed a bug that broke support for Remote Archive SCM Type project syncs on platforms that utilize Python2 - https://github.com/ansible/awx/pull/8057
-- Updated to the latest version of Django Rest Framework.
+- Updated to the latest version of Django Rest Framework to address CVE-2020-25626
- Updated to the latest version of Django to address CVE-2020-24583 and CVE-2020-24584
- Updated to the latest version of channels_redis to address a bug that slowly causes Daphne processes to leak memory over time - https://github.com/django/channels_redis/issues/212

@@ -71,7 +132,7 @@ This is a list of high-level changes for each release of AWX. A full list of com
- Fixed a bug that caused rsyslogd's configuration file to have world-readable file permissions, potentially leaking secrets (CVE-2020-10782)

## 12.0.0 (Jun 9, 2020)
- Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240)
- Moved to a single container image build instead of separate awx_web and awx_task images. The container image is just `awx` (https://github.com/ansible/awx/pull/7228)
- Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017)
- Removed support for HipChat notifications ([EoL announcement](https://www.atlassian.com/partnerships/slack/faq#faq-98b17ca3-247f-423b-9a78-70a91681eff0)); all previously-created HipChat notification templates will be deleted due to this removal.
@@ -85,7 +85,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo

#### Frontend Development

-See [the ui development documentation](awx/ui/README.md).
+See [the ui development documentation](awx/ui_next/CONTRIBUTING.md).

### Build the environment
@@ -158,7 +158,7 @@ $ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
44251b476f98 gcr.io/ansible-tower-engineering/awx_devel:devel "/entrypoint.sh /bin…" 27 seconds ago Up 23 seconds 0.0.0.0:6899->6899/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 0.0.0.0:8080->8080/tcp, 22/tcp, 0.0.0.0:8888->8888/tcp tools_awx_run_9e820694d57e
40de380e3c2e redis:latest "docker-entrypoint.s…" 28 seconds ago Up 26 seconds
-b66a506d3007 postgres:10 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1
+b66a506d3007 postgres:12 "docker-entrypoint.s…" 28 seconds ago Up 26 seconds 0.0.0.0:5432->5432/tcp tools_postgres_1
```
**NOTE**
@@ -60,7 +60,7 @@ Please note that deploying from `HEAD` (or the latest commit) is **not** stable,

For more on how to clone the repo, view [git clone help](https://git-scm.com/docs/git-clone).

-Once you have a local copy, run commands within the root of the project tree.
+Once you have a local copy, run the commands in the following sections from the root of the project tree.

### AWX branding

@@ -78,10 +78,12 @@ Before you can run a deployment, you'll need the following installed in your loc
- [docker](https://pypi.org/project/docker/) Python module
  + This is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it.
  + We use this module instead of `docker-py` because it is what the `docker-compose` Python module requires.
- [community.general.docker_image collection](https://docs.ansible.com/ansible/latest/collections/community/general/docker_image_module.html)
  + This is only required if you are using Ansible >= 2.10
- [GNU Make](https://www.gnu.org/software/make/)
- [Git](https://git-scm.com/) Requires Version 1.8.4+
- Python 3.6+
-- [Node 10.x LTS version](https://nodejs.org/en/download/)
+- [Node 14.x LTS version](https://nodejs.org/en/download/)
+  + This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
- [NPM 6.x LTS](https://docs.npmjs.com/)
+  + This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
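As a quick sanity check that the `docker` module named above (and not the legacy `docker-py`) is the one installed, a minimal Python sketch follows; `docker.from_env()` is the SDK's standard entry point, and a reachable local Docker daemon is an assumption of the check:

```python
# Minimal sanity check: confirm the "docker" PyPI package (the SDK the
# docker-compose Python module needs), not the legacy "docker-py", is importable.
import docker

client = docker.from_env()            # picks up DOCKER_HOST etc. from the environment
print(client.version()["Version"])    # prints the daemon version when the SDK works
```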
@@ -495,7 +497,7 @@ Before starting the install process, review the [inventory](./installer/inventor

*docker_compose_dir*

-> When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
+> When using docker-compose, the `docker-compose.yml` file will be created there (default `~/.awx/awxcompose`).

*custom_venv_dir*

@@ -662,6 +664,7 @@ The preferred way to install the AWX CLI is through pip directly from PyPI:

To build the docs, spin up a real AWX server, `pip3 install sphinx sphinxcontrib-autoprogram`, and run:

~ cd awxkit/awxkit/cli/docs
~ TOWER_HOST=https://awx.example.org TOWER_USERNAME=example TOWER_PASSWORD=secret make clean html
~ cd build/html/ && python -m http.server
Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
@@ -4,8 +4,6 @@ recursive-include awx *.mo
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html
-recursive-include awx/ui/templates *.html
-recursive-include awx/ui/static *
recursive-include awx/ui_next/build *.html
recursive-include awx/ui_next/build *
recursive-include awx/playbooks *.yml
Makefile (170 changes)
@@ -19,7 +19,8 @@ PYCURL_SSL_LIBRARY ?= openssl
COMPOSE_TAG ?= $(GIT_BRANCH)
COMPOSE_HOST ?= $(shell hostname)

-VENV_BASE ?= /venv
+VENV_BASE ?= /var/lib/awx/venv/
+COLLECTION_BASE ?= /var/lib/awx/vendor/awx_ansible_collections
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

@@ -56,11 +57,6 @@ WHEEL_COMMAND ?= bdist_wheel
SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl

-# UI flag files
-UI_DEPS_FLAG_FILE = awx/ui/.deps_built
-UI_RELEASE_DEPS_FLAG_FILE = awx/ui/.release_deps_built
-UI_RELEASE_FLAG_FILE = awx/ui/.release_built
-I18N_FLAG_FILE = .i18n_built

.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
@@ -70,22 +66,6 @@ I18N_FLAG_FILE = .i18n_built
	ui-docker-machine ui-docker ui-release ui-devel \
	ui-test ui-deps ui-test-ci VERSION

-# remove ui build artifacts
-clean-ui: clean-languages
-	rm -rf awx/ui/static/
-	rm -rf awx/ui/node_modules/
-	rm -rf awx/ui/test/unit/reports/
-	rm -rf awx/ui/test/spec/reports/
-	rm -rf awx/ui/test/e2e/reports/
-	rm -rf awx/ui/client/languages/
-	rm -rf awx/ui_next/node_modules/
-	rm -rf node_modules
-	rm -rf awx/ui_next/coverage/
-	rm -rf awx/ui_next/build/locales/_build/
-	rm -f $(UI_DEPS_FLAG_FILE)
-	rm -f $(UI_RELEASE_DEPS_FLAG_FILE)
-	rm -f $(UI_RELEASE_FLAG_FILE)

clean-tmp:
	rm -rf tmp/

@@ -214,7 +194,11 @@ requirements_awx_dev:

requirements_collections:
	mkdir -p $(COLLECTION_BASE)
-	ansible-galaxy collection install -r requirements/collections_requirements.yml -p $(COLLECTION_BASE)
+	n=0; \
+	until [ "$$n" -ge 5 ]; do \
+		ansible-galaxy collection install -r requirements/collections_requirements.yml -p $(COLLECTION_BASE) && break; \
+		n=$$((n+1)); \
+	done

requirements: requirements_ansible requirements_awx requirements_collections

@@ -287,7 +271,7 @@ uwsgi: collectstatic
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
-	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"
+	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/var/lib/awx/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"

daphne:
	@if [ "$(VENV_BASE)" ]; then \
@@ -357,7 +341,7 @@ check: flake8 pep8 # pyflakes pylint

awx-link:
	[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
-	cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
+	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link

TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -476,130 +460,47 @@ else
	@echo No PO files
endif

# generate UI .pot
pot: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run pot

# generate django .pot .po
LANG = "en-us"
messages:
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot

# generate l10n .json .mo
languages: $(I18N_FLAG_FILE)

$(I18N_FLAG_FILE): $(UI_RELEASE_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run languages
	$(PYTHON) tools/scripts/compilemessages.py
	touch $(I18N_FLAG_FILE)

# End l10n TASKS
# --------------------------------------

# UI RELEASE TASKS
# --------------------------------------
ui-release: $(UI_RELEASE_FLAG_FILE)

$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_RELEASE_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run build-release
	touch $(UI_RELEASE_FLAG_FILE)

$(UI_RELEASE_DEPS_FLAG_FILE):
	PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
	touch $(UI_RELEASE_DEPS_FLAG_FILE)

# END UI RELEASE TASKS
# --------------------------------------

# UI TASKS
# --------------------------------------
ui-deps: $(UI_DEPS_FLAG_FILE)

$(UI_DEPS_FLAG_FILE):
	@if [ -f ${UI_RELEASE_DEPS_FLAG_FILE} ]; then \
		rm -rf awx/ui/node_modules; \
		rm -f ${UI_RELEASE_DEPS_FLAG_FILE}; \
	fi; \
	$(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
	touch $(UI_DEPS_FLAG_FILE)
UI_BUILD_FLAG_FILE = awx/ui_next/.ui-built

ui-docker-machine: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run ui-docker-machine -- $(MAKEFLAGS)

# Native docker. Builds UI and raises BrowserSync & filesystem polling.
ui-docker: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run ui-docker -- $(MAKEFLAGS)

# Builds UI with development UI without raising browser-sync or filesystem polling.
ui-devel: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run build-devel -- $(MAKEFLAGS)

ui-test: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run test

ui-lint: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) run --prefix awx/ui jshint
	$(NPM_BIN) run --prefix awx/ui lint

# A standard go-to target for API developers to use building the frontend
ui: clean-ui ui-devel

ui-test-ci: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run test:ci
	$(NPM_BIN) --prefix awx/ui run unit

jshint: $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) run --prefix awx/ui jshint
	$(NPM_BIN) run --prefix awx/ui lint

ui-zuul-lint-and-test:
	CHROMIUM_BIN=$(CHROMIUM_BIN) ./awx/ui/build/zuul_download_chromium.sh
	CHROMIUM_BIN=$(CHROMIUM_BIN) PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
	CHROMIUM_BIN=$(CHROMIUM_BIN) $(NPM_BIN) run --prefix awx/ui jshint
	CHROMIUM_BIN=$(CHROMIUM_BIN) $(NPM_BIN) run --prefix awx/ui lint
	CHROME_BIN=$(CHROMIUM_BIN) $(NPM_BIN) --prefix awx/ui run test:ci
	CHROME_BIN=$(CHROMIUM_BIN) $(NPM_BIN) --prefix awx/ui run unit

# END UI TASKS
# --------------------------------------

# UI NEXT TASKS
# --------------------------------------
clean-ui:
	rm -rf node_modules
	rm -rf awx/ui_next/node_modules
	rm -rf awx/ui_next/build
	rm -rf awx/ui_next/src/locales/_build
	rm -rf $(UI_BUILD_FLAG_FILE)
	git checkout awx/ui_next/src/locales

awx/ui_next/node_modules:
	$(NPM_BIN) --prefix awx/ui_next install
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn --ignore-scripts install

ui-release-next:
	mkdir -p awx/ui_next/build/static
	touch awx/ui_next/build/static/.placeholder

ui-devel-next: awx/ui_next/node_modules
	$(NPM_BIN) --prefix awx/ui_next run extract-strings
	$(NPM_BIN) --prefix awx/ui_next run compile-strings
	$(NPM_BIN) --prefix awx/ui_next run build
$(UI_BUILD_FLAG_FILE):
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
	$(NPM_BIN) --prefix awx/ui_next --loglevel warn run build
	git checkout awx/ui_next/src/locales
	mkdir -p awx/public/static/css
	mkdir -p awx/public/static/js
	mkdir -p awx/public/static/media
	cp -r awx/ui_next/build/static/css/* awx/public/static/css
	cp -r awx/ui_next/build/static/js/* awx/public/static/js
	cp -r awx/ui_next/build/static/media/* awx/public/static/media
	touch $@

clean-ui-next:
	rm -rf node_modules
	rm -rf awx/ui_next/node_modules
	rm -rf awx/ui_next/build
ui-release: awx/ui_next/node_modules $(UI_BUILD_FLAG_FILE)

ui-next-zuul-lint-and-test:
ui-devel: awx/ui_next/node_modules
	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)

ui-zuul-lint-and-test:
	$(NPM_BIN) --prefix awx/ui_next install
	$(NPM_BIN) run --prefix awx/ui_next lint
	$(NPM_BIN) run --prefix awx/ui_next prettier-check
	$(NPM_BIN) run --prefix awx/ui_next test

# END UI NEXT TASKS
# --------------------------------------

# Build a pip-installable package into dist/ with a timestamped version number.
dev_build:
@@ -609,10 +510,10 @@ dev_build:

release_build:
	$(PYTHON) setup.py release_build

-dist/$(SDIST_TAR_FILE): ui-release ui-release-next VERSION
+dist/$(SDIST_TAR_FILE): ui-release VERSION
	$(PYTHON) setup.py $(SDIST_COMMAND)

-dist/$(WHEEL_FILE): ui-release ui-release-next
+dist/$(WHEEL_FILE): ui-release
	$(PYTHON) setup.py $(WHEEL_COMMAND)

sdist: dist/$(SDIST_TAR_FILE)
@@ -646,9 +547,11 @@ awx/projects:

docker-compose-isolated: awx/projects
	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up

+COMPOSE_UP_OPTS ?=
+
# Docker Compose Development environment
docker-compose: docker-auth awx/projects
-	CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx
+	CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml $(COMPOSE_UP_OPTS) up --no-recreate awx

docker-compose-cluster: docker-auth awx/projects
	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up
@@ -716,7 +619,10 @@ clean-elk:
	docker rm tools_kibana_1

psql-container:
-	docker run -it --net tools_default --rm postgres:10 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
	@echo "awx: $(VERSION)"

+Dockerfile: installer/roles/image_build/templates/Dockerfile.j2
+	ansible localhost -m template -a "src=installer/roles/image_build/templates/Dockerfile.j2 dest=Dockerfile"
README.md (15 changes)
@@ -1,7 +1,5 @@
[](https://ansible.softwarefactory-project.io/zuul/status)

<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />

AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is the upstream project for [Tower](https://www.ansible.com/tower), a commercial derivative of AWX.

To install AWX, please view the [Install guide](./INSTALL.md).
@@ -16,20 +14,20 @@ Contributing
------------

- Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
-- All code submissions are done through pull requests against the `devel` branch.
-- All contributors must use git commit --signoff for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
-- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
-- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
+- All code submissions are made through pull requests against the `devel` branch.
+- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
+- Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
+- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.

Reporting Issues
----------------

-If you're experiencing a problem that you feel is a bug in AWX, or have ideas for how to improve AWX, we encourage you to open an issue, and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).
+If you're experiencing a problem that you feel is a bug in AWX or have ideas for improving AWX, we encourage you to open an issue and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).

Code of Conduct
---------------

-We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
+We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

Get Involved
------------
@@ -43,4 +41,3 @@ License
-------

[Apache v2](./LICENSE.md)
@@ -16,6 +16,7 @@ register(
    help_text=_('Number of seconds that a user is inactive before they will need to login again.'),
    category=_('Authentication'),
    category_slug='authentication',
+    unit=_('seconds'),
)
register(
    'SESSIONS_PER_USER',
@@ -49,6 +50,7 @@ register(
    'in the number of seconds.'),
    category=_('Authentication'),
    category_slug='authentication',
+    unit=_('seconds'),
)
register(
    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
@@ -47,8 +47,6 @@ from awx.main.utils import (
    get_object_or_400,
    decrypt_field,
    get_awx_version,
-    get_licenser,
-    StubLicense
)
from awx.main.utils.db import get_all_field_names
from awx.main.views import ApiErrorView
@@ -189,7 +187,8 @@ class APIView(views.APIView):
    '''
    Log warning for 400 requests. Add header with elapsed time.
    '''

+    from awx.main.utils import get_licenser
+    from awx.main.utils.licensing import OpenLicense
    #
    # If the URL was rewritten, and we get a 404, we should entirely
    # replace the view in the request context with an ApiErrorView()
@@ -225,7 +224,8 @@ class APIView(views.APIView):
    response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
    time_started = getattr(self, 'time_started', None)
    response['X-API-Product-Version'] = get_awx_version()
-    response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), StubLicense) else 'Red Hat Ansible Tower'
+    response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'

    response['X-API-Node'] = settings.CLUSTER_HOST_ID
    if time_started:
        time_elapsed = time.time() - self.time_started
@@ -39,7 +39,7 @@ class Metadata(metadata.SimpleMetadata):
    'min_length', 'max_length',
    'min_value', 'max_value',
    'category', 'category_slug',
-    'defined_in_file'
+    'defined_in_file', 'unit',
]

for attr in text_attrs:
@@ -453,7 +453,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
    if 'capability_map' not in self.context:
        if hasattr(self, 'polymorphic_base'):
            model = self.polymorphic_base.Meta.model
-            prefetch_list = self.polymorphic_base._capabilities_prefetch
+            prefetch_list = self.polymorphic_base.capabilities_prefetch
        else:
            model = self.Meta.model
            prefetch_list = self.capabilities_prefetch
@@ -640,12 +640,9 @@ class EmptySerializer(serializers.Serializer):


class UnifiedJobTemplateSerializer(BaseSerializer):
-    # As a base serializer, the capabilities prefetch is not used directly
-    _capabilities_prefetch = [
-        'admin', 'execute',
-        {'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use',
-                  'organization.workflow_admin']}
-    ]
+    # As a base serializer, the capabilities prefetch is not used directly,
+    # instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
+    capabilities_prefetch = []

    class Meta:
        model = UnifiedJobTemplate
@@ -695,7 +692,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
    serializer.polymorphic_base = self
    # capabilities prefetch is only valid for these models
    if isinstance(obj, (JobTemplate, WorkflowJobTemplate)):
-        serializer.capabilities_prefetch = self._capabilities_prefetch
+        serializer.capabilities_prefetch = serializer_class.capabilities_prefetch
    else:
        serializer.capabilities_prefetch = None
    return serializer.to_representation(obj)
@@ -1333,6 +1330,8 @@ class ProjectOptionsSerializer(BaseSerializer):
    scm_type = attrs.get('scm_type', u'') or u''
    if self.instance and not scm_type:
        valid_local_paths.append(self.instance.local_path)
+    if self.instance and scm_type and "local_path" in attrs and self.instance.local_path != attrs['local_path']:
+        errors['local_path'] = _(f'Cannot change local_path for {scm_type}-based projects')
    if scm_type:
        attrs.pop('local_path', None)
    if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths:
@@ -1749,7 +1748,7 @@ class HostSerializer(BaseSerializerWithVariables):
    attrs['variables'] = json.dumps(vars_dict)
    if Group.objects.filter(name=name, inventory=inventory).exists():
        raise serializers.ValidationError(_('A Group with that name already exists.'))

    return super(HostSerializer, self).validate(attrs)

def to_representation(self, obj):
@@ -3438,6 +3437,12 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
    res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
    if obj.webhook_credential_id:
        res['webhook_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.webhook_credential_id})
+    if obj.inventory_id:
+        res['inventory'] = self.reverse(
+            'api:inventory_detail', kwargs={
+                'pk': obj.inventory_id
+            }
+        )
    return res

def validate_extra_vars(self, value):
@@ -3940,12 +3945,12 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
    return UriCleaner.remove_sensitive(obj.stdout)

def get_event_data(self, obj):
-    # the project update playbook uses the git, hg, or svn modules
+    # the project update playbook uses the git or svn modules
    # to clone repositories, and those modules are prone to printing
    # raw SCM URLs in their stdout (which *could* contain passwords)
    # attempt to detect and filter HTTP basic auth passwords in the stdout
    # of these types of events
-    if obj.event_data.get('task_action') in ('git', 'hg', 'svn'):
+    if obj.event_data.get('task_action') in ('git', 'svn'):
        try:
            return json.loads(
                UriCleaner.remove_sensitive(
@@ -4,7 +4,6 @@ The following lists the expected format and details of our rrules:
* DTSTART is expected to be in UTC
* INTERVAL is required
* SECONDLY is not supported
-* TZID is not supported
* RRULE must precede the rule statements
* BYDAY is supported but not BYDAY with a numerical prefix
* BYYEARDAY and BYWEEKNO are not supported
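For a concrete sense of the expected format, here is a minimal sketch using `python-dateutil`; the start date and frequency are illustrative assumptions, not values taken from the docs above:

```python
from itertools import islice

from dateutil.rrule import rrulestr

# A rule string in the shape the docs describe: DTSTART in UTC,
# INTERVAL present, and none of the unsupported features (e.g. SECONDLY).
rule = rrulestr("DTSTART:20240101T000000Z\nRRULE:FREQ=HOURLY;INTERVAL=4")

# Print the first three occurrences generated by the rule.
for occurrence in islice(rule, 3):
    print(occurrence)
```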
@@ -8,7 +8,7 @@ The `period` of the data can be adjusted with:

    ?period=month

- Where `month` can be replaced with `week`, or `day`. `month` is the default.
+ Where `month` can be replaced with `week`, `two_weeks`, or `day`. `month` is the default.

The type of job can be filtered with:
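To make the new option concrete, a hedged sketch of requesting the jobs graph with the added two_weeks period; the hostname and credentials are placeholders, and the endpoint path is assumed from the dashboard URL patterns:

import requests

# Assumed endpoint and placeholder auth; only the period parameter is from the doc above.
resp = requests.get(
    "https://awx.example.com/api/v2/dashboard/graphs/jobs/",
    params={"period": "two_weeks"},
    auth=("admin", "password"),
)
print(resp.json())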
@@ -15,6 +15,7 @@ from awx.api.views import (
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
+   ApiV2AttachView,
    AuthView,
    UserMeList,
    DashboardView,
@@ -94,6 +95,7 @@ v2_urls = [
    url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
    url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
    url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
+   url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'),
    url(r'^auth/$', AuthView.as_view()),
    url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
    url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
@@ -153,6 +153,7 @@ from awx.api.views.root import ( # noqa
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
+   ApiV2AttachView,
)
from awx.api.views.webhooks import ( # noqa
    WebhookKeyView,
@@ -241,8 +242,6 @@ class DashboardView(APIView):
        git_failed_projects = git_projects.filter(last_job_failed=True)
        svn_projects = user_projects.filter(scm_type='svn')
        svn_failed_projects = svn_projects.filter(last_job_failed=True)
-       hg_projects = user_projects.filter(scm_type='hg')
-       hg_failed_projects = hg_projects.filter(last_job_failed=True)
        archive_projects = user_projects.filter(scm_type='archive')
        archive_failed_projects = archive_projects.filter(last_job_failed=True)
        data['scm_types'] = {}
@@ -256,11 +255,6 @@ class DashboardView(APIView):
                                    'failures_url': reverse('api:project_list', request=request) + "?scm_type=svn&last_job_failed=True",
                                    'total': svn_projects.count(),
                                    'failed': svn_failed_projects.count()}
-       data['scm_types']['hg'] = {'url': reverse('api:project_list', request=request) + "?scm_type=hg",
-                                  'label': 'Mercurial',
-                                  'failures_url': reverse('api:project_list', request=request) + "?scm_type=hg&last_job_failed=True",
-                                  'total': hg_projects.count(),
-                                  'failed': hg_failed_projects.count()}
        data['scm_types']['archive'] = {'url': reverse('api:project_list', request=request) + "?scm_type=archive",
                                        'label': 'Remote Archive',
                                        'failures_url': reverse('api:project_list', request=request) + "?scm_type=archive&last_job_failed=True",
@@ -316,6 +310,9 @@ class DashboardJobsGraphView(APIView):
        if period == 'month':
            end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
            interval = 'days'
+       elif period == 'two_weeks':
+           end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2)
+           interval = 'days'
        elif period == 'week':
            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
            interval = 'days'
@@ -3043,7 +3040,7 @@ class WorkflowJobTemplateNodeCreateApproval(RetrieveAPIView):
            approval_template,
            context=self.get_serializer_context()
        ).data
-       return Response(data, status=status.HTTP_200_OK)
+       return Response(data, status=status.HTTP_201_CREATED)

    def check_permissions(self, request):
        obj = self.get_object().workflow_job_template
@@ -4253,7 +4250,9 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'delete', obj):
            return Response(status=status.HTTP_404_NOT_FOUND)
-       if obj.notifications.filter(status='pending').exists():
+       hours_old = now() - dateutil.relativedelta.relativedelta(hours=8)
+       if obj.notifications.filter(status='pending', created__gt=hours_old).exists():
            return Response({"error": _("Delete not allowed while there are pending notifications")},
                            status=status.HTTP_405_METHOD_NOT_ALLOWED)
        return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)
@@ -13,6 +13,7 @@ from django.utils.translation import ugettext_lazy as _
from awx.main.models import (
    ActivityStream,
    Inventory,
+   Host,
    Project,
    JobTemplate,
    WorkflowJobTemplate,
@@ -98,6 +99,7 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI
            organization__id=org_id).count()
        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
+       org_counts['hosts'] = Host.objects.org_active_count(org_id)

        full_context['related_field_counts'] = {}
        full_context['related_field_counts'][org_id] = org_counts
@@ -1,9 +1,10 @@
# Copyright (c) 2018 Ansible, Inc.
# All Rights Reserved.

+import base64
+import json
import logging
import operator
-import json
from collections import OrderedDict

from django.conf import settings
@@ -29,8 +30,8 @@ from awx.main.utils import (
    get_custom_venv_choices,
    to_python_boolean,
)
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import reverse, drf_reverse
from awx.conf.license import get_license
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import (
    Project,
@@ -178,7 +179,7 @@ class ApiV2PingView(APIView):
class ApiV2SubscriptionView(APIView):

    permission_classes = (IsAuthenticated,)
-   name = _('Configuration')
+   name = _('Subscriptions')
    swagger_topic = 'System Configuration'

    def check_permissions(self, request):
@@ -189,18 +190,18 @@ class ApiV2SubscriptionView(APIView):
    def post(self, request):
        from awx.main.utils.common import get_licenser
        data = request.data.copy()
-       if data.get('rh_password') == '$encrypted$':
-           data['rh_password'] = settings.REDHAT_PASSWORD
+       if data.get('subscriptions_password') == '$encrypted$':
+           data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
        try:
-           user, pw = data.get('rh_username'), data.get('rh_password')
+           user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
            with set_environ(**settings.AWX_TASK_ENV):
                validated = get_licenser().validate_rh(user, pw)
            if user:
-               settings.REDHAT_USERNAME = data['rh_username']
+               settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
            if pw:
-               settings.REDHAT_PASSWORD = data['rh_password']
+               settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
        except Exception as exc:
-           msg = _("Invalid License")
+           msg = _("Invalid Subscription")
            if (
                isinstance(exc, requests.exceptions.HTTPError) and
                getattr(getattr(exc, 'response', None), 'status_code', None) == 401
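With the renamed fields above, a client now submits subscriptions_username/subscriptions_password instead of the old rh_* keys. A minimal sketch, assuming the config/subscriptions/ endpoint registered earlier; host and credentials are placeholders:

import requests

resp = requests.post(
    "https://awx.example.com/api/v2/config/subscriptions/",
    json={
        "subscriptions_username": "user@example.com",
        # sending the sentinel '$encrypted$' reuses the stored password, per the view above
        "subscriptions_password": "secret",
    },
    auth=("admin", "password"),
)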
@@ -213,13 +214,63 @@
            elif isinstance(exc, (ValueError, OSError)) and exc.args:
                msg = exc.args[0]
            else:
-               logger.exception(smart_text(u"Invalid license submitted."),
+               logger.exception(smart_text(u"Invalid subscription submitted."),
                                 extra=dict(actor=request.user.username))
            return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)

        return Response(validated)


+class ApiV2AttachView(APIView):
+
+    permission_classes = (IsAuthenticated,)
+    name = _('Attach Subscription')
+    swagger_topic = 'System Configuration'
+
+    def check_permissions(self, request):
+        super(ApiV2AttachView, self).check_permissions(request)
+        if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
+            self.permission_denied(request)  # Raises PermissionDenied exception.
+
+    def post(self, request):
+        data = request.data.copy()
+        pool_id = data.get('pool_id', None)
+        if not pool_id:
+            return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
+        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
+        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
+        if pool_id and user and pw:
+            from awx.main.utils.common import get_licenser
+            data = request.data.copy()
+            try:
+                with set_environ(**settings.AWX_TASK_ENV):
+                    validated = get_licenser().validate_rh(user, pw)
+            except Exception as exc:
+                msg = _("Invalid Subscription")
+                if (
+                    isinstance(exc, requests.exceptions.HTTPError) and
+                    getattr(getattr(exc, 'response', None), 'status_code', None) == 401
+                ):
+                    msg = _("The provided credentials are invalid (HTTP 401).")
+                elif isinstance(exc, requests.exceptions.ProxyError):
+                    msg = _("Unable to connect to proxy server.")
+                elif isinstance(exc, requests.exceptions.ConnectionError):
+                    msg = _("Could not connect to subscription service.")
+                elif isinstance(exc, (ValueError, OSError)) and exc.args:
+                    msg = exc.args[0]
+                else:
+                    logger.exception(smart_text(u"Invalid subscription submitted."),
+                                     extra=dict(actor=request.user.username))
+                return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
+            for sub in validated:
+                if sub['pool_id'] == pool_id:
+                    sub['valid_key'] = True
+                    settings.LICENSE = sub
+                    return Response(sub)
+
+        return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)


class ApiV2ConfigView(APIView):

    permission_classes = (IsAuthenticated,)
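A hedged sketch of exercising the new attach endpoint: it expects a pool_id in the body and relies on the previously stored SUBSCRIPTIONS_* credentials. The host, auth, and pool ID value are placeholders:

import requests

resp = requests.post(
    "https://awx.example.com/api/v2/config/attach/",
    json={"pool_id": "0123456789abcdef"},  # placeholder pool ID
    auth=("admin", "password"),
)
print(resp.status_code, resp.json())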
@@ -234,15 +285,11 @@ class ApiV2ConfigView(APIView):
    def get(self, request, format=None):
        '''Return various sitewide configuration settings'''

-       if request.user.is_superuser or request.user.is_system_auditor:
-           license_data = get_license(show_key=True)
-       else:
-           license_data = get_license(show_key=False)
+       from awx.main.utils.common import get_licenser
+       license_data = get_licenser().validate()

        if not license_data.get('valid_key', False):
            license_data = {}
        if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
            # FIXME: Make the final setting value dependent on the feature?
            license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED

        pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
@@ -281,9 +328,10 @@ class ApiV2ConfigView(APIView):

        return Response(data)

    def post(self, request):
        if not isinstance(request.data, dict):
-           return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
+           return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
        if "eula_accepted" not in request.data:
            return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
        try:
@@ -300,25 +348,47 @@ class ApiV2ConfigView(APIView):
            logger.info(smart_text(u"Invalid JSON submitted for license."),
                        extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
-       try:
-           from awx.main.utils.common import get_licenser
-           license_data = json.loads(data_actual)
-           license_data_validated = get_licenser(**license_data).validate()
-       except Exception:
-           logger.warning(smart_text(u"Invalid license submitted."),
-                          extra=dict(actor=request.user.username))
-           return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
+       from awx.main.utils.common import get_licenser
+       license_data = json.loads(data_actual)
+       if 'license_key' in license_data:
+           return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
+       if 'manifest' in license_data:
+           try:
+               json_actual = json.loads(base64.b64decode(license_data['manifest']))
+               if 'license_key' in json_actual:
+                   return Response(
+                       {"error": _('Legacy license submitted. A subscription manifest is now required.')},
+                       status=status.HTTP_400_BAD_REQUEST
+                   )
+           except Exception:
+               pass
+           try:
+               license_data = validate_entitlement_manifest(license_data['manifest'])
+           except ValueError as e:
+               return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
+           except Exception:
+               logger.exception('Invalid manifest submitted. {}')
+               return Response({"error": _('Invalid manifest submitted.')}, status=status.HTTP_400_BAD_REQUEST)
+
+           try:
+               license_data_validated = get_licenser().license_from_manifest(license_data)
+           except Exception:
+               logger.warning(smart_text(u"Invalid subscription submitted."),
+                              extra=dict(actor=request.user.username))
+               return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
+       else:
+           license_data_validated = get_licenser().validate()

        # If the license is valid, write it to the database.
        if license_data_validated['valid_key']:
            settings.LICENSE = license_data
            if not settings_registry.is_setting_read_only('TOWER_URL_BASE'):
                settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
            return Response(license_data_validated)

-       logger.warning(smart_text(u"Invalid license submitted."),
+       logger.warning(smart_text(u"Invalid subscription submitted."),
                       extra=dict(actor=request.user.username))
-       return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
+       return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request):
        try:
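Putting the new flow together, a minimal sketch of uploading a subscription manifest to /api/v2/config/ as the handler above expects: a base64-encoded manifest plus the eula_accepted flag. The file name, host, and auth are assumptions:

import base64
import requests

with open("manifest.zip", "rb") as f:  # assumed manifest file name
    manifest_b64 = base64.b64encode(f.read()).decode()

resp = requests.post(
    "https://awx.example.com/api/v2/config/",
    json={"manifest": manifest_b64, "eula_accepted": True},
    auth=("admin", "password"),
)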
@@ -25,10 +25,12 @@ if MODE == 'production':
    try:
        fd = open("/var/lib/awx/.tower_version", "r")
        if fd.read().strip() != tower_version:
-           raise Exception()
-   except Exception:
+           raise ValueError()
+   except FileNotFoundError:
+       pass
+   except ValueError as e:
        logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
-       raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
+       raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") from e


os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
@@ -1,18 +1,14 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.


__all__ = ['get_license']


def _get_validated_license_data():
-   from awx.main.utils.common import get_licenser
+   from awx.main.utils import get_licenser
    return get_licenser().validate()


-def get_license(show_key=False):
+def get_license():
    """Return a dictionary representing the active license on this Tower instance."""
-   license_data = _get_validated_license_data()
-   if not show_key:
-       license_data.pop('license_key', None)
-   return license_data
+   return _get_validated_license_data()
awx/conf/migrations/0008_subscriptions.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 2.2.11 on 2020-08-04 15:19

import logging

from django.db import migrations

from awx.conf.migrations._subscriptions import clear_old_license, prefill_rh_credentials

logger = logging.getLogger('awx.conf.migrations')


def _noop(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('conf', '0007_v380_rename_more_settings'),
    ]

    operations = [
        migrations.RunPython(clear_old_license, _noop),
        migrations.RunPython(prefill_rh_credentials, _noop)
    ]
awx/conf/migrations/_subscriptions.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
import logging
from django.utils.timezone import now
from awx.main.utils.encryption import decrypt_field, encrypt_field

logger = logging.getLogger('awx.conf.settings')

__all__ = ['clear_old_license', 'prefill_rh_credentials']


def clear_old_license(apps, schema_editor):
    Setting = apps.get_model('conf', 'Setting')
    Setting.objects.filter(key='LICENSE').delete()


def _migrate_setting(apps, old_key, new_key, encrypted=False):
    Setting = apps.get_model('conf', 'Setting')
    if not Setting.objects.filter(key=old_key).exists():
        return
    new_setting = Setting.objects.create(key=new_key,
                                         created=now(),
                                         modified=now()
                                         )
    if encrypted:
        new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
        new_setting.value = encrypt_field(new_setting, 'value')
    else:
        new_setting.value = getattr(Setting.objects.filter(key=old_key).first(), 'value')
    new_setting.save()


def prefill_rh_credentials(apps, schema_editor):
    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
@@ -78,14 +78,6 @@ class Setting(CreatedModifiedModel):
    def get_cache_id_key(self, key):
        return '{}_ID'.format(key)

-   def display_value(self):
-       if self.key == 'LICENSE' and 'license_key' in self.value:
-           # don't log the license key in activity stream
-           value = self.value.copy()
-           value['license_key'] = '********'
-           return value
-       return self.value


import awx.conf.signals  # noqa
@@ -129,12 +129,14 @@ class SettingsRegistry(object):
        placeholder = field_kwargs.pop('placeholder', empty)
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
+       unit = field_kwargs.pop('unit', None)
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
            field_kwargs['child'].source = None
        field_instance = field_class(**field_kwargs)
        field_instance.category_slug = category_slug
        field_instance.category = category
        field_instance.depends_on = depends_on
+       field_instance.unit = unit
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        field_instance.defined_in_file = defined_in_file
@@ -4,6 +4,7 @@ import logging
import sys
import threading
import time
+import os

# Django
from django.conf import LazySettings
@@ -247,6 +248,7 @@ class SettingsWrapper(UserSettingsHolder):
        # These values have to be stored via self.__dict__ in this way to get
        # around the magic __setattr__ method on this class (which is used to
        # store API-assigned settings in the database).
+       self.__dict__['__forks__'] = {}
        self.__dict__['default_settings'] = default_settings
        self.__dict__['_awx_conf_settings'] = self
        self.__dict__['_awx_conf_preload_expires'] = None
@@ -255,6 +257,26 @@ class SettingsWrapper(UserSettingsHolder):
        self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
        self.__dict__['registry'] = registry

+       # record the current pid so we compare it post-fork for
+       # processes like the dispatcher and callback receiver
+       self.__dict__['pid'] = os.getpid()
+
+   def __clean_on_fork__(self):
+       pid = os.getpid()
+       # if the current pid does *not* match the value on self, it means
+       # that value was copied on fork, and we're now in a *forked* process;
+       # the *first* time we enter this code path (on setting access),
+       # forcibly close DB/cache sockets and set a marker so we don't run
+       # this code again _in this process_
+       #
+       if pid != self.__dict__['pid'] and pid not in self.__dict__['__forks__']:
+           self.__dict__['__forks__'][pid] = True
+           # It's important to close these post-fork, because we
+           # don't want the forked processes to inherit the open sockets
+           # for the DB and cache connections (that way lies race conditions)
+           connection.close()
+           django_cache.close()

    @cached_property
    def all_supported_settings(self):
        return self.registry.get_registered_settings()
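The pid bookkeeping above is a general pattern for detecting that state was copied across fork(). A minimal standalone sketch of that pattern, under the assumption that the wrapped resource holds process-bound sockets; the class and method names are illustrative, not AWX's:

import os

class ForkAwareHolder:
    def __init__(self):
        self._pid = os.getpid()   # pid at construction time
        self._forks = {}          # child pids already cleaned up

    def clean_on_fork(self):
        pid = os.getpid()
        # A differing pid means this object was copied into a forked child;
        # run the cleanup exactly once per child process.
        if pid != self._pid and pid not in self._forks:
            self._forks[pid] = True
            self._close_inherited_sockets()

    def _close_inherited_sockets(self):
        # Placeholder: real code would close DB/cache connections here so
        # the child does not share live sockets with its parent.
        pass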
@@ -330,6 +352,7 @@ class SettingsWrapper(UserSettingsHolder):
        self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)

    def _get_local(self, name, validate=True):
+       self.__clean_on_fork__()
        self._preload_cache()
        cache_key = Setting.get_cache_key(name)
        try:
File diff suppressed because it is too large
@@ -3294,6 +3294,16 @@ msgid ""
"common scenarios."
msgstr "Les domaines OpenStack définissent les limites administratives. Ils sont nécessaires uniquement pour les URL d’authentification Keystone v3. Voir la documentation Ansible Tower pour les scénarios courants."

+#: awx/main/models/credential/__init__.py:824
+msgid "Region Name"
+msgstr "Nom de la region"
+
+#: awx/main/models/credential/__init__.py:826
+msgid ""
+"For some cloud providers, like OVH, region must be specified."
+msgstr ""
+"Chez certains fournisseurs, comme OVH, vous devez spécifier le nom de la région"
+
#: awx/main/models/credential/__init__.py:812
#: awx/main/models/credential/__init__.py:1110
#: awx/main/models/credential/__init__.py:1144
File diff suppressed because it is too large
@@ -333,14 +333,14 @@ class BaseAccess(object):
            report_violation(_("License has expired."))

        free_instances = validation_info.get('free_instances', 0)
-       available_instances = validation_info.get('available_instances', 0)
+       instance_count = validation_info.get('instance_count', 0)

        if add_host_name:
            host_exists = Host.objects.filter(name=add_host_name).exists()
            if not host_exists and free_instances == 0:
-               report_violation(_("License count of %s instances has been reached.") % available_instances)
+               report_violation(_("License count of %s instances has been reached.") % instance_count)
            elif not host_exists and free_instances < 0:
-               report_violation(_("License count of %s instances has been exceeded.") % available_instances)
+               report_violation(_("License count of %s instances has been exceeded.") % instance_count)
        elif not add_host_name and free_instances < 0:
            report_violation(_("Host count exceeds available instances."))
@@ -33,9 +33,9 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
'''


-@register('config', '1.1', description=_('General platform configuration.'))
+@register('config', '1.2', description=_('General platform configuration.'))
def config(since, **kwargs):
-   license_info = get_license(show_key=False)
+   license_info = get_license()
    install_type = 'traditional'
    if os.environ.get('container') == 'oci':
        install_type = 'openshift'
@@ -194,7 +194,6 @@ def instance_info(since, include_hostnames=False, **kwargs):
    return info


@register('job_counts', '1.0', description=_('Counts of jobs by status'))
def job_counts(since, **kwargs):
    counts = {}
    counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
@@ -204,7 +203,6 @@ def job_counts(since, **kwargs):
    return counts


@register('job_instance_counts', '1.0', description=_('Counts of jobs by execution node'))
def job_instance_counts(since, **kwargs):
    counts = {}
    job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
@@ -282,14 +280,16 @@ def _copy_table(table, query, path):
    return file.file_list()


-@register('events_table', '1.1', format='csv', description=_('Automation task records'), expensive=True)
+@register('events_table', '1.2', format='csv', description=_('Automation task records'), expensive=True)
def events_table(since, full_path, until, **kwargs):
    events_query = '''COPY (SELECT main_jobevent.id,
                        main_jobevent.created,
                        main_jobevent.modified,
                        main_jobevent.uuid,
                        main_jobevent.parent_uuid,
                        main_jobevent.event,
+                       main_jobevent.event_data::json->'task_action' AS task_action,
+                       (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
                        main_jobevent.failed,
                        main_jobevent.changed,
                        main_jobevent.playbook,
@@ -24,7 +24,7 @@ logger = logging.getLogger('awx.main.analytics')

def _valid_license():
    try:
-       if get_license(show_key=False).get('license_type', 'UNLICENSED') == 'open':
+       if get_license().get('license_type', 'UNLICENSED') == 'open':
            return False
        access_registry[Job](None).check_license()
    except PermissionDenied:
@@ -68,7 +68,7 @@ def register(key, version, description=None, format='json', expensive=False):

    @register('projects_by_scm_type', 1)
    def projects_by_scm_type():
-       return {'git': 5, 'svn': 1, 'hg': 0}
+       return {'git': 5, 'svn': 1}
    """

    def decorate(f):
@@ -102,7 +102,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c

    last_run = since or settings.AUTOMATION_ANALYTICS_LAST_GATHER or (now() - timedelta(weeks=4))
    logger.debug("Last analytics run was: {}".format(settings.AUTOMATION_ANALYTICS_LAST_GATHER))

    if _valid_license() is False:
        logger.exception("Invalid License provided, or No License Provided")
        return None
@@ -12,7 +12,7 @@ from prometheus_client import (
from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version)
from awx.main.analytics.collectors import (
    counts,
    instance_info,
    job_instance_counts,
    job_counts,
@@ -54,7 +54,7 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining


def metrics():
-   license_info = get_license(show_key=False)
+   license_info = get_license()
    SYSTEM_INFO.info({
        'install_uuid': settings.INSTALL_UUID,
        'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
@@ -68,7 +68,7 @@ def metrics():
        'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None')
    })

-   LICENSE_INSTANCE_TOTAL.set(str(license_info.get('available_instances', 0)))
+   LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
    LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))

    current_counts = counts(None)
@@ -1,7 +1,5 @@
# Python
-import json
import logging
-import os

# Django
from django.utils.translation import ugettext_lazy as _
@@ -13,6 +11,7 @@ from rest_framework.fields import FloatField
# Tower
from awx.conf import fields, register, register_validate


logger = logging.getLogger('awx.main.conf')

register(
@@ -92,22 +91,10 @@
)


-def _load_default_license_from_file():
-   try:
-       license_file = os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license')
-       if os.path.exists(license_file):
-           license_data = json.load(open(license_file))
-           logger.debug('Read license data from "%s".', license_file)
-           return license_data
-   except Exception:
-       logger.warning('Could not read license from "%s".', license_file, exc_info=True)
-   return {}


register(
    'LICENSE',
    field_class=fields.DictField,
-   default=_load_default_license_from_file,
+   default=lambda: {},
    label=_('License'),
    help_text=_('The license controls which features and functionality are '
                'enabled. Use /api/v2/config/ to update or change '
@@ -124,7 +111,7 @@ register(
    encrypted=False,
    read_only=False,
    label=_('Red Hat customer username'),
-   help_text=_('This username is used to retrieve license information and to send Automation Analytics'),  # noqa
+   help_text=_('This username is used to send data to Automation Analytics'),
    category=_('System'),
    category_slug='system',
)
@@ -137,7 +124,33 @@ register(
    encrypted=True,
    read_only=False,
    label=_('Red Hat customer password'),
-   help_text=_('This password is used to retrieve license information and to send Automation Analytics'),  # noqa
+   help_text=_('This password is used to send data to Automation Analytics'),
    category=_('System'),
    category_slug='system',
)

+register(
+    'SUBSCRIPTIONS_USERNAME',
+    field_class=fields.CharField,
+    default='',
+    allow_blank=True,
+    encrypted=False,
+    read_only=False,
+    label=_('Red Hat or Satellite username'),
+    help_text=_('This username is used to retrieve subscription and content information'),  # noqa
+    category=_('System'),
+    category_slug='system',
+)
+
+register(
+    'SUBSCRIPTIONS_PASSWORD',
+    field_class=fields.CharField,
+    default='',
+    allow_blank=True,
+    encrypted=True,
+    read_only=False,
+    label=_('Red Hat or Satellite password'),
+    help_text=_('This password is used to retrieve subscription and content information'),  # noqa
+    category=_('System'),
+    category_slug='system',
+)
@@ -148,7 +161,7 @@ register(
    default='https://example.com',
    schemes=('http', 'https'),
    allow_plain_hostname=True,  # Allow hostname only without TLD.
-   label=_('Automation Analytics upload URL.'),
+   label=_('Automation Analytics upload URL'),
    help_text=_('This setting is used to to configure data collection for the Automation Analytics dashboard'),
    category=_('System'),
    category_slug='system',
@@ -253,6 +266,7 @@ register(
    help_text=_('The number of seconds to sleep between status checks for jobs running on isolated instances.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -264,6 +278,7 @@ register(
                'This includes the time needed to copy source control files (playbooks) to the isolated instance.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -276,6 +291,7 @@ register(
                'Value should be substantially greater than expected network latency.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -497,6 +513,7 @@ register(
                'timeout should be imposed. A timeout set on an individual job template will override this.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -509,6 +526,7 @@ register(
                'timeout should be imposed. A timeout set on an individual inventory source will override this.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -521,6 +539,7 @@ register(
                'timeout should be imposed. A timeout set on an individual project will override this.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -535,6 +554,7 @@ register(
                'Use a value of 0 to indicate that no timeout should be imposed.'),
    category=_('Jobs'),
    category_slug='jobs',
+   unit=_('seconds'),
)

register(
@@ -542,7 +562,7 @@ register(
    field_class=fields.IntegerField,
    allow_null=False,
    default=200,
-   label=_('Maximum number of forks per job.'),
+   label=_('Maximum number of forks per job'),
    help_text=_('Saving a Job Template with more than this number of forks will result in an error. '
                'When set to 0, no limit is applied.'),
    category=_('Jobs'),
@@ -672,6 +692,7 @@ register(
                'aggregator protocols.'),
    category=_('Logging'),
    category_slug='logging',
+   unit=_('seconds'),
)
register(
    'LOG_AGGREGATOR_VERIFY_CERT',
@@ -752,7 +773,8 @@ register(
    default=14400,  # every 4 hours
    min_value=1800,  # every 30 minutes
    category=_('System'),
-   category_slug='system'
+   category_slug='system',
+   unit=_('seconds'),
)
@@ -75,7 +75,7 @@ class WebsocketSecretAuthHelper:
        nonce_diff = now - nonce_parsed
        if abs(nonce_diff) > nonce_tolerance:
            logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
-           raise ValueError("Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
+           raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")

        return True
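As a standalone illustration of the freshness check fixed above (the old line lacked the f-prefix, so the skew was never interpolated into the message), a minimal sketch with an assumed tolerance value:

import time

NONCE_TOLERANCE = 300  # seconds; illustrative assumption

def verify_nonce(nonce_timestamp: float) -> bool:
    nonce_diff = time.time() - nonce_timestamp
    if abs(nonce_diff) > NONCE_TOLERANCE:
        # f-string so the actual skew appears in the error message
        raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.")
    return True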
@@ -1,10 +1,7 @@
-import cProfile
import json
import logging
import os
-import pstats
import signal
-import tempfile
import time
import traceback
@@ -23,6 +20,7 @@ from awx.main.models import (JobEvent, AdHocCommandEvent, ProjectUpdateEvent,
                             Job)
from awx.main.tasks import handle_success_and_failure_notifications
from awx.main.models.events import emit_event_detail
+from awx.main.utils.profiling import AWXProfiler

from .base import BaseWorker
@@ -40,6 +38,7 @@ class CallbackBrokerWorker(BaseWorker):

    MAX_RETRIES = 2
    last_stats = time.time()
+   last_flush = time.time()
    total = 0
    last_event = ''
    prof = None
@@ -48,12 +47,13 @@
        self.buff = {}
        self.pid = os.getpid()
        self.redis = redis.Redis.from_url(settings.BROKER_URL)
+       self.prof = AWXProfiler("CallbackBrokerWorker")
        for key in self.redis.keys('awx_callback_receiver_statistics_*'):
            self.redis.delete(key)

    def read(self, queue):
        try:
-           res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=settings.JOB_EVENT_BUFFER_SECONDS)
+           res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=1)
            if res is None:
                return {'event': 'FLUSH'}
            self.total += 1
@@ -87,19 +87,12 @@ class CallbackBrokerWorker(BaseWorker):
        )

    def toggle_profiling(self, *args):
-       if self.prof:
-           self.prof.disable()
-           filename = f'callback-{self.pid}.pstats'
-           filepath = os.path.join(tempfile.gettempdir(), filename)
-           with open(filepath, 'w') as f:
-               pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
-           pstats.Stats(self.prof).dump_stats(filepath + '.raw')
-           self.prof = False
-           logger.error(f'profiling is disabled, wrote {filepath}')
-       else:
-           self.prof = cProfile.Profile()
-           self.prof.enable()
+       if not self.prof.is_started():
+           self.prof.start()
            logger.error('profiling is enabled')
+       else:
+           filepath = self.prof.stop()
+           logger.error(f'profiling is disabled, wrote {filepath}')

    def work_loop(self, *args, **kw):
        if settings.AWX_CALLBACK_PROFILE:
@@ -110,6 +103,7 @@ class CallbackBrokerWorker(BaseWorker):
        now = tz_now()
        if (
            force or
+           (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or
            any([len(events) >= 1000 for events in self.buff.values()])
        ):
            for cls, events in self.buff.items():
@@ -132,6 +126,7 @@ class CallbackBrokerWorker(BaseWorker):
                for e in events:
                    emit_event_detail(e)
            self.buff = {}
+           self.last_flush = time.time()

    def perform_work(self, body):
        try:
@@ -30,3 +30,10 @@ class _AwxTaskError():


AwxTaskError = _AwxTaskError()


+class PostRunError(Exception):
+    def __init__(self, msg, status='failed', tb=''):
+        self.status = status
+        self.tb = tb
+        super(PostRunError, self).__init__(msg)
@@ -7,6 +7,7 @@ import tempfile
import time
import logging
import yaml
+import datetime

from django.conf import settings
import ansible_runner
@@ -123,6 +124,7 @@ class IsolatedManager(object):
            dir=private_data_dir
        )
        params = self.runner_params.copy()
+       params.get('envvars', dict())['ANSIBLE_CALLBACK_WHITELIST'] = 'profile_tasks'
        params['playbook'] = playbook
        params['private_data_dir'] = iso_dir
        if idle_timeout:
@@ -149,7 +151,6 @@ class IsolatedManager(object):
            # don't rsync source control metadata (it can be huge!)
            '- /project/.git',
            '- /project/.svn',
-           '- /project/.hg',
            # don't rsync job events that are in the process of being written
            '- /artifacts/job_events/*-partial.json.tmp',
            # don't rsync the ssh_key FIFO
@@ -169,7 +170,8 @@ class IsolatedManager(object):
        extravars = {
            'src': self.private_data_dir,
            'dest': settings.AWX_PROOT_BASE_PATH,
-           'ident': self.ident
+           'ident': self.ident,
+           'job_id': self.instance.id,
        }
        if playbook:
            extravars['playbook'] = playbook
@@ -205,7 +207,10 @@ class IsolatedManager(object):
        :param interval: an interval (in seconds) to wait between status polls
        """
        interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
-       extravars = {'src': self.private_data_dir}
+       extravars = {
+           'src': self.private_data_dir,
+           'job_id': self.instance.id
+       }
        status = 'failed'
        rc = None
        last_check = time.time()
@@ -221,9 +226,13 @@ class IsolatedManager(object):
                logger.warning('Isolated job {} was manually canceled.'.format(self.instance.id))

            logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
+           time_start = datetime.datetime.now()
            runner_obj = self.run_management_playbook('check_isolated.yml',
                                                      self.private_data_dir,
                                                      extravars=extravars)
+           time_end = datetime.datetime.now()
+           time_diff = time_end - time_start
+           logger.debug('Finished checking on isolated job {} with `check_isolated.yml` took {} seconds.'.format(self.instance.id, time_diff.total_seconds()))
            status, rc = runner_obj.status, runner_obj.rc

            if self.check_callback is not None and not self.captured_command_artifact:
@@ -18,7 +18,5 @@ class Command(BaseCommand):
        super(Command, self).__init__()
        license = get_licenser().validate()
        if options.get('data'):
-           if license.get('license_key', '') != 'UNLICENSED':
-               license['license_key'] = '********'
            return json.dumps(license)
        return license.get('license_type', 'none')
@@ -8,5 +8,7 @@ class Command(MakeMigrations):
    def execute(self, *args, **options):
        settings = connections['default'].settings_dict.copy()
        settings['ENGINE'] = 'sqlite3'
+       if 'application_name' in settings['OPTIONS']:
+           del settings['OPTIONS']['application_name']
        connections['default'] = DatabaseWrapper(settings)
        return MakeMigrations().execute(*args, **options)
@@ -21,7 +21,7 @@ from awx.main.signals import (
    disable_computed_fields
)

-from awx.main.management.commands.deletion import AWXCollector, pre_delete
+from awx.main.utils.deletion import AWXCollector, pre_delete


class Command(BaseCommand):
awx/main/management/commands/graph_jobs.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# Python
import asciichartpy as chart
import collections
import time
import sys

# Django
from django.db.models import Count
from django.core.management.base import BaseCommand

# AWX
from awx.main.models import (
    Job,
    Instance
)


DEFAULT_WIDTH = 100
DEFAULT_HEIGHT = 30


def chart_color_lookup(color_str):
    return getattr(chart, color_str)


def clear_screen():
    print(chr(27) + "[2J")


class JobStatus():
    def __init__(self, status, color, width):
        self.status = status
        self.color = color
        self.color_code = chart_color_lookup(color)
        self.x = collections.deque(maxlen=width)
        self.y = collections.deque(maxlen=width)

    def tick(self, x, y):
        self.x.append(x)
        self.y.append(y)


class JobStatusController:
    RESET = chart_color_lookup('reset')

    def __init__(self, width):
        self.plots = [
            JobStatus('pending', 'red', width),
            JobStatus('waiting', 'blue', width),
            JobStatus('running', 'green', width)
        ]
        self.ts_start = int(time.time())

    def tick(self):
        ts = int(time.time()) - self.ts_start
        q = Job.objects.filter(status__in=['pending','waiting','running']).values_list('status').order_by().annotate(Count('status'))
        status_count = dict(pending=0, waiting=0, running=0)
        for status, count in q:
            status_count[status] = count

        for p in self.plots:
            p.tick(ts, status_count[p.status])

    def series(self):
        return [list(p.y) for p in self.plots]

    def generate_status(self):
        line = ""
        lines = []
        for p in self.plots:
            lines.append(f'{p.color_code}{p.status} {p.y[-1]}{self.RESET}')

        line += ", ".join(lines) + '\n'

        width = 5
        time_running = int(time.time()) - self.ts_start
        instances = Instance.objects.all().order_by('hostname')
        line += "Capacity: " + ", ".join([f"{instance.capacity:{width}}" for instance in instances]) + '\n'
        line += "Remaining: " + ", ".join([f"{instance.remaining_capacity:{width}}" for instance in instances]) + '\n'
        line += f"Seconds running: {time_running}" + '\n'

        return line


class Command(BaseCommand):
    help = "Plot pending, waiting, running jobs over time on the terminal"

    def add_arguments(self, parser):
        parser.add_argument('--refresh', dest='refresh', type=float, default=1.0,
                            help='Time between refreshes of the graph and data in seconds (defaults to 1.0)')
        parser.add_argument('--width', dest='width', type=int, default=DEFAULT_WIDTH,
                            help=f'Width of the graph (defaults to {DEFAULT_WIDTH})')
        parser.add_argument('--height', dest='height', type=int, default=DEFAULT_HEIGHT,
                            help=f'Height of the graph (defaults to {DEFAULT_HEIGHT})')

    def handle(self, *args, **options):
        refresh_seconds = options['refresh']
        width = options['width']
        height = options['height']

        jctl = JobStatusController(width)

        conf = {
            'colors': [chart_color_lookup(p.color) for p in jctl.plots],
            'height': height,
        }

        while True:
            jctl.tick()

            draw = chart.plot(jctl.series(), conf)
            status_line = jctl.generate_status()
            clear_screen()
            print(draw)
            sys.stdout.write(status_line)
            time.sleep(refresh_seconds)
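Since this plugs into Django's management framework, it would presumably be invoked through AWX's manage entry point. The flags mirror add_arguments above, but the invocation itself is an assumption:

# Assumed shell invocation of the new command shown above:
#   awx-manage graph_jobs --refresh 2.0 --width 120 --height 20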
@@ -19,6 +19,9 @@ from django.core.management.base import BaseCommand, CommandError
from django.db import connection, transaction
from django.utils.encoding import smart_text

+# DRF error class to distinguish license exceptions
+from rest_framework.exceptions import PermissionDenied
+
# AWX inventory imports
from awx.main.models.inventory import (
    Inventory,
@@ -31,11 +34,12 @@ from awx.main.utils.safe_yaml import sanitize_jinja

# other AWX imports
from awx.main.models.rbac import batch_role_ancestor_rebuilding
+# TODO: remove proot utils once we move to running inv. updates in containers
from awx.main.utils import (
-   ignore_inventory_computed_fields,
    check_proot_installed,
    wrap_args_with_proot,
    build_proot_temp_dir,
+   ignore_inventory_computed_fields,
    get_licenser
)
from awx.main.signals import disable_activity_stream
@@ -53,11 +57,11 @@ No license.
See http://www.ansible.com/renew for license information.'''

LICENSE_MESSAGE = '''\
-Number of licensed instances exceeded, would bring available instances to %(new_count)d, system is licensed for %(available_instances)d.
+Number of licensed instances exceeded, would bring available instances to %(new_count)d, system is licensed for %(instance_count)d.
See http://www.ansible.com/renew for license extension information.'''

DEMO_LICENSE_MESSAGE = '''\
-Demo mode free license count exceeded, would bring available instances to %(new_count)d, demo mode allows %(available_instances)d.
+Demo mode free license count exceeded, would bring available instances to %(new_count)d, demo mode allows %(instance_count)d.
See http://www.ansible.com/renew for licensing information.'''
@@ -75,13 +79,11 @@ class AnsibleInventoryLoader(object):
    /usr/bin/ansible/ansible-inventory -i hosts --list
    '''

-   def __init__(self, source, is_custom=False, venv_path=None, verbosity=0):
+   def __init__(self, source, venv_path=None, verbosity=0):
        self.source = source
-       self.source_dir = functioning_dir(self.source)
-       self.is_custom = is_custom
-       self.tmp_private_dir = None
        self.method = 'ansible-inventory'
        self.verbosity = verbosity
+       # TODO: remove once proot has been removed
+       self.tmp_private_dir = None
        if venv_path:
            self.venv_path = venv_path
        else:
@@ -131,38 +133,34 @@ class AnsibleInventoryLoader(object):
        # NOTE: why do we add "python" to the start of these args?
        # the script that runs ansible-inventory specifies a python interpreter
        # that makes no sense in light of the fact that we put all the dependencies
-       # inside of /venv/ansible, so we override the specified interpreter
+       # inside of /var/lib/awx/venv/ansible, so we override the specified interpreter
        # https://github.com/ansible/ansible/issues/50714
        bargs = ['python', ansible_inventory_path, '-i', self.source]
-       bargs.extend(['--playbook-dir', self.source_dir])
+       bargs.extend(['--playbook-dir', functioning_dir(self.source)])
        if self.verbosity:
            # INFO: -vvv, DEBUG: -vvvvv, for inventory, any more than 3 makes little difference
            bargs.append('-{}'.format('v' * min(5, self.verbosity * 2 + 1)))
        logger.debug('Using base command: {}'.format(' '.join(bargs)))
        return bargs

+   # TODO: Remove this once we move to running ansible-inventory in containers
+   # and don't need proot for process isolation anymore
    def get_proot_args(self, cmd, env):
        cwd = os.getcwd()
        if not check_proot_installed():
            raise RuntimeError("proot is not installed but is configured for use")

        kwargs = {}
-       if self.is_custom:
-           # use source's tmp dir for proot, task manager will delete folder
-           logger.debug("Using provided directory '{}' for isolation.".format(self.source_dir))
-           kwargs['proot_temp_dir'] = self.source_dir
-           cwd = self.source_dir
-       else:
-           # we cannot safely store tmp data in source dir or trust script contents
-           if env['AWX_PRIVATE_DATA_DIR']:
-               # If this is non-blank, file credentials are being used and we need access
-               private_data_dir = functioning_dir(env['AWX_PRIVATE_DATA_DIR'])
-               logger.debug("Using private credential data in '{}'.".format(private_data_dir))
-               kwargs['private_data_dir'] = private_data_dir
-           self.tmp_private_dir = build_proot_temp_dir()
-           logger.debug("Using fresh temporary directory '{}' for isolation.".format(self.tmp_private_dir))
-           kwargs['proot_temp_dir'] = self.tmp_private_dir
-           kwargs['proot_show_paths'] = [functioning_dir(self.source), settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
+       # we cannot safely store tmp data in source dir or trust script contents
+       if env['AWX_PRIVATE_DATA_DIR']:
+           # If this is non-blank, file credentials are being used and we need access
+           private_data_dir = functioning_dir(env['AWX_PRIVATE_DATA_DIR'])
+           logger.debug("Using private credential data in '{}'.".format(private_data_dir))
+           kwargs['private_data_dir'] = private_data_dir
+       self.tmp_private_dir = build_proot_temp_dir()
+       logger.debug("Using fresh temporary directory '{}' for isolation.".format(self.tmp_private_dir))
+       kwargs['proot_temp_dir'] = self.tmp_private_dir
+       kwargs['proot_show_paths'] = [functioning_dir(self.source), settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
        logger.debug("Running from `{}` working directory.".format(cwd))

        if self.venv_path != settings.ANSIBLE_VENV_PATH:
@@ -170,12 +168,14 @@ class AnsibleInventoryLoader(object):

        return wrap_args_with_proot(cmd, cwd, **kwargs)

    def command_to_json(self, cmd):
        data = {}
        stdout, stderr = '', ''
        env = self.build_env()

-       if ((self.is_custom or 'AWX_PRIVATE_DATA_DIR' in env) and
+       # TODO: remove proot args once inv. updates run in containers
+       if (('AWX_PRIVATE_DATA_DIR' in env) and
                getattr(settings, 'AWX_PROOT_ENABLED', False)):
            cmd = self.get_proot_args(cmd, env)
@@ -184,11 +184,13 @@ class AnsibleInventoryLoader(object):
        stdout = smart_text(stdout)
        stderr = smart_text(stderr)

+       # TODO: can be removed when proot is removed
        if self.tmp_private_dir:
            shutil.rmtree(self.tmp_private_dir, True)

        if proc.returncode != 0:
            raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % (
-               self.method, proc.returncode, stdout, stderr))
+               'ansible-inventory', proc.returncode, stdout, stderr))

        for line in stderr.splitlines():
            logger.error(line)
@@ -231,9 +233,9 @@ class Command(BaseCommand):
                            action='store_true', default=False,
                            help='overwrite (rather than merge) variables')
        parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False,
-                           help='use database variables if set')
+                           help='DEPRECATED legacy option, has no effect')
        parser.add_argument('--custom', dest='custom', action='store_true', default=False,
-                           help='this is a custom inventory script')
+                           help='DEPRECATED indicates a custom inventory script, no longer used')
        parser.add_argument('--source', dest='source', type=str, default=None,
                            metavar='s', help='inventory directory, file, or script to load')
        parser.add_argument('--enabled-var', dest='enabled_var', type=str,
@@ -259,10 +261,10 @@ class Command(BaseCommand):
                            'specifies the unique, immutable instance ID, may be '
                            'specified as "foo.bar" to traverse nested dicts.')

-   def set_logging_level(self):
+   def set_logging_level(self, verbosity):
        log_levels = dict(enumerate([logging.WARNING, logging.INFO,
                                     logging.DEBUG, 0]))
-       logger.setLevel(log_levels.get(self.verbosity, 0))
+       logger.setLevel(log_levels.get(verbosity, 0))

    def _get_instance_id(self, variables, default=''):
        '''
@@ -322,7 +324,8 @@ class Command(BaseCommand):
        else:
            raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))

-   def get_source_absolute_path(self, source):
+   @staticmethod
+   def get_source_absolute_path(source):
        if not os.path.exists(source):
            raise IOError('Source does not exist: %s' % source)
        source = os.path.join(os.getcwd(), os.path.dirname(source),
@@ -330,61 +333,6 @@
        source = os.path.normpath(os.path.abspath(source))
        return source

-   def load_inventory_from_database(self):
-       '''
-       Load inventory and related objects from the database.
-       '''
-       # Load inventory object based on name or ID.
-       if self.inventory_id:
-           q = dict(id=self.inventory_id)
-       else:
-           q = dict(name=self.inventory_name)
-       try:
-           self.inventory = Inventory.objects.get(**q)
-       except Inventory.DoesNotExist:
-           raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
-       except Inventory.MultipleObjectsReturned:
-           raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
-       logger.info('Updating inventory %d: %s' % (self.inventory.pk,
-                                                  self.inventory.name))
-
-       # Load inventory source if specified via environment variable (when
-       # inventory_import is called from an InventoryUpdate task).
-       inventory_source_id = os.getenv('INVENTORY_SOURCE_ID', None)
-       inventory_update_id = os.getenv('INVENTORY_UPDATE_ID', None)
-       if inventory_source_id:
-           try:
-               self.inventory_source = InventorySource.objects.get(pk=inventory_source_id,
-                                                                   inventory=self.inventory)
-           except InventorySource.DoesNotExist:
-               raise CommandError('Inventory source with id=%s not found' %
-                                  inventory_source_id)
-           try:
-               self.inventory_update = InventoryUpdate.objects.get(pk=inventory_update_id)
-           except InventoryUpdate.DoesNotExist:
-               raise CommandError('Inventory update with id=%s not found' %
-                                  inventory_update_id)
-       # Otherwise, create a new inventory source to capture this invocation
-       # via command line.
-       else:
-           with ignore_inventory_computed_fields():
-               self.inventory_source, created = InventorySource.objects.get_or_create(
-                   inventory=self.inventory,
-                   source='file',
-                   source_path=os.path.abspath(self.source),
-                   overwrite=self.overwrite,
-                   overwrite_vars=self.overwrite_vars,
-               )
-               self.inventory_update = self.inventory_source.create_inventory_update(
-                   _eager_fields=dict(
-                       job_args=json.dumps(sys.argv),
-                       job_env=dict(os.environ.items()),
-                       job_cwd=os.getcwd())
-               )
-
-       # FIXME: Wait or raise error if inventory is being updated by another
-       # source.

    def _batch_add_m2m(self, related_manager, *objs, **kwargs):
        key = (related_manager.instance.pk, related_manager.through._meta.db_table)
        flush = bool(kwargs.get('flush', False))
@@ -874,42 +822,41 @@ class Command(BaseCommand):
        Load inventory from in-memory groups to the database, overwriting or
        merging as appropriate.
        '''
-       with advisory_lock('inventory_{}_update'.format(self.inventory.id)):
-           # FIXME: Attribute changes to superuser?
-           # Perform __in queries in batches (mainly for unit tests using SQLite).
-           self._batch_size = 500
-           self._build_db_instance_id_map()
-           self._build_mem_instance_id_map()
-           if self.overwrite:
-               self._delete_hosts()
-               self._delete_groups()
-               self._delete_group_children_and_hosts()
-           self._update_inventory()
-           self._create_update_groups()
-           self._create_update_hosts()
-           self._create_update_group_children()
-           self._create_update_group_hosts()
+       # FIXME: Attribute changes to superuser?
+       # Perform __in queries in batches (mainly for unit tests using SQLite).
+       self._batch_size = 500
+       self._build_db_instance_id_map()
+       self._build_mem_instance_id_map()
+       if self.overwrite:
+           self._delete_hosts()
+           self._delete_groups()
+           self._delete_group_children_and_hosts()
+       self._update_inventory()
+       self._create_update_groups()
+       self._create_update_hosts()
+       self._create_update_group_children()
+       self._create_update_group_hosts()

    def remote_tower_license_compare(self, local_license_type):
        # this requires https://github.com/ansible/ansible/pull/52747
        source_vars = self.all_group.variables
        remote_license_type = source_vars.get('tower_metadata', {}).get('license_type', None)
        if remote_license_type is None:
-           raise CommandError('Unexpected Error: Tower inventory plugin missing needed metadata!')
+           raise PermissionDenied('Unexpected Error: Tower inventory plugin missing needed metadata!')
        if local_license_type != remote_license_type:
-           raise CommandError('Tower server licenses must match: source: {} local: {}'.format(
+           raise PermissionDenied('Tower server licenses must match: source: {} local: {}'.format(
                remote_license_type, local_license_type
            ))

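A minimal sketch of the batched "__in" pattern the `self._batch_size = 500` comment above refers to: querying primary keys in fixed-size chunks so backends like SQLite never see an oversized parameter list. The helper name and model are hypothetical, for illustration only:

def iter_in_batches(model, pks, batch_size=500):
    # yield rows matching pks, issuing one pk__in query per chunk
    for offset in range(0, len(pks), batch_size):
        chunk = pks[offset:offset + batch_size]
        yield from model.objects.filter(pk__in=chunk)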
    def check_license(self):
        license_info = get_licenser().validate()
+       local_license_type = license_info.get('license_type', 'UNLICENSED')
-       if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED':
+       if local_license_type == 'UNLICENSED':
            logger.error(LICENSE_NON_EXISTANT_MESSAGE)
-           raise CommandError('No license found!')
+           raise PermissionDenied('No license found!')
        elif local_license_type == 'open':
            return
        available_instances = license_info.get('available_instances', 0)
        instance_count = license_info.get('instance_count', 0)
        free_instances = license_info.get('free_instances', 0)
        time_remaining = license_info.get('time_remaining', 0)
        hard_error = license_info.get('trial', False) is True or license_info['instance_count'] == 10
@@ -917,24 +864,24 @@ class Command(BaseCommand):
        if time_remaining <= 0:
            if hard_error:
                logger.error(LICENSE_EXPIRED_MESSAGE)
-               raise CommandError("License has expired!")
+               raise PermissionDenied("License has expired!")
            else:
                logger.warning(LICENSE_EXPIRED_MESSAGE)
        # special check for tower-type inventory sources
        # but only if running the plugin
        TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
-       if self.inventory_source.source == 'tower' and any(f in self.source for f in TOWER_SOURCE_FILES):
+       if self.inventory_source.source == 'tower' and any(f in self.inventory_source.source_path for f in TOWER_SOURCE_FILES):
            # only if this is the 2nd call to license check, we cannot compare before running plugin
            if hasattr(self, 'all_group'):
                self.remote_tower_license_compare(local_license_type)
        if free_instances < 0:
            d = {
                'new_count': new_count,
                'available_instances': available_instances,
                'instance_count': instance_count,
            }
            if hard_error:
                logger.error(LICENSE_MESSAGE % d)
-               raise CommandError('License count exceeded!')
+               raise PermissionDenied('License count exceeded!')
            else:
                logger.warning(LICENSE_MESSAGE % d)

@@ -949,7 +896,7 @@ class Command(BaseCommand):

        active_count = Host.objects.org_active_count(org.id)
        if active_count > org.max_hosts:
-           raise CommandError('Host limit for organization exceeded!')
+           raise PermissionDenied('Host limit for organization exceeded!')

    def mark_license_failure(self, save=True):
        self.inventory_update.license_error = True
@@ -960,16 +907,103 @@ class Command(BaseCommand):
        self.inventory_update.save(update_fields=['org_host_limit_error'])

    def handle(self, *args, **options):
-       self.verbosity = int(options.get('verbosity', 1))
-       self.set_logging_level()
-       self.inventory_name = options.get('inventory_name', None)
-       self.inventory_id = options.get('inventory_id', None)
-       venv_path = options.get('venv', None)
        # Load inventory and related objects from database.
        inventory_name = options.get('inventory_name', None)
        inventory_id = options.get('inventory_id', None)
        if inventory_name and inventory_id:
            raise CommandError('--inventory-name and --inventory-id are mutually exclusive')
        elif not inventory_name and not inventory_id:
            raise CommandError('--inventory-name or --inventory-id is required')

        with advisory_lock('inventory_{}_import'.format(inventory_id)):
            # Obtain rest of the options needed to run update
            raw_source = options.get('source', None)
            if not raw_source:
                raise CommandError('--source is required')
            verbosity = int(options.get('verbosity', 1))
            self.set_logging_level(verbosity)
            venv_path = options.get('venv', None)

            # Load inventory object based on name or ID.
            if inventory_id:
                q = dict(id=inventory_id)
            else:
                q = dict(name=inventory_name)
            try:
                inventory = Inventory.objects.get(**q)
            except Inventory.DoesNotExist:
                raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
            except Inventory.MultipleObjectsReturned:
                raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
            logger.info('Updating inventory %d: %s' % (inventory.pk, inventory.name))

            # Create ad-hoc inventory source and inventory update objects
            with ignore_inventory_computed_fields():
                source = Command.get_source_absolute_path(raw_source)

                inventory_source, created = InventorySource.objects.get_or_create(
                    inventory=inventory,
                    source='file',
                    source_path=os.path.abspath(source),
                    overwrite=bool(options.get('overwrite', False)),
                    overwrite_vars=bool(options.get('overwrite_vars', False)),
                )
                inventory_update = inventory_source.create_inventory_update(
                    _eager_fields=dict(
                        job_args=json.dumps(sys.argv),
                        job_env=dict(os.environ.items()),
                        job_cwd=os.getcwd())
                )

            data = AnsibleInventoryLoader(
                source=source, venv_path=venv_path, verbosity=verbosity
            ).load()

            logger.debug('Finished loading from source: %s', source)

            status, tb, exc = 'error', '', None
            try:
                self.perform_update(options, data, inventory_update)
                status = 'successful'
            except Exception as e:
                exc = e
                if isinstance(e, KeyboardInterrupt):
                    status = 'canceled'
                else:
                    tb = traceback.format_exc()

            with ignore_inventory_computed_fields():
                inventory_update = InventoryUpdate.objects.get(pk=inventory_update.pk)
                inventory_update.result_traceback = tb
                inventory_update.status = status
                inventory_update.save(update_fields=['status', 'result_traceback'])
                inventory_source.status = status
                inventory_source.save(update_fields=['status'])

        if exc:
            logger.error(str(exc))

        if exc:
            if isinstance(exc, CommandError):
                sys.exit(1)
            raise exc

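A hedged sketch of how the inventory_{id}_import lock above serializes concurrent imports of the same inventory; advisory_lock is the context manager from awx.main.utils.pglock used throughout this changeset (a PostgreSQL advisory lock under the hood):

from awx.main.utils.pglock import advisory_lock

def import_inventory(inventory_id):
    with advisory_lock('inventory_{}_import'.format(inventory_id)):
        # only one process at a time reaches this point for a given
        # inventory id; a second caller blocks until the lock is released
        ...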
    def perform_update(self, options, data, inventory_update):
        """Shared method for both awx-manage CLI updates and inventory updates
        from the tasks system.

        This saves the inventory data to the database, calling load_into_database
        but also wraps that method in a host of options processing
        """
        # outside of normal options, these are needed as part of programatic interface
        self.inventory = inventory_update.inventory
        self.inventory_source = inventory_update.inventory_source
        self.inventory_update = inventory_update

        # the update options, could be parser object or dict
        self.overwrite = bool(options.get('overwrite', False))
        self.overwrite_vars = bool(options.get('overwrite_vars', False))
        self.keep_vars = bool(options.get('keep_vars', False))
        self.is_custom = bool(options.get('custom', False))
        self.source = options.get('source', None)
        self.enabled_var = options.get('enabled_var', None)
        self.enabled_value = options.get('enabled_value', None)
        self.group_filter = options.get('group_filter', None) or r'^.+$'
@@ -977,17 +1011,6 @@ class Command(BaseCommand):
        self.exclude_empty_groups = bool(options.get('exclude_empty_groups', False))
        self.instance_id_var = options.get('instance_id_var', None)

-       self.invoked_from_dispatcher = False if os.getenv('INVENTORY_SOURCE_ID', None) is None else True
-
-       # Load inventory and related objects from database.
-       if self.inventory_name and self.inventory_id:
-           raise CommandError('--inventory-name and --inventory-id are mutually exclusive')
-       elif not self.inventory_name and not self.inventory_id:
-           raise CommandError('--inventory-name or --inventory-id is required')
        if (self.overwrite or self.overwrite_vars) and self.keep_vars:
            raise CommandError('--overwrite/--overwrite-vars and --keep-vars are mutually exclusive')
-       if not self.source:
-           raise CommandError('--source is required')
        try:
            self.group_filter_re = re.compile(self.group_filter)
        except re.error:
@@ -998,47 +1021,43 @@ class Command(BaseCommand):
            raise CommandError('invalid regular expression for --host-filter')

        begin = time.time()
-       self.load_inventory_from_database()
-
-       try:
-           self.check_license()
-       except CommandError as e:
-           self.mark_license_failure(save=True)
-           raise e
-
-       try:
-           # Check the per-org host limits
-           self.check_org_host_limit()
-       except CommandError as e:
-           self.mark_org_limits_failure(save=True)
-           raise e
+       # Since perform_update can be invoked either through the awx-manage CLI
+       # or from the task system, we need to create a new lock at this level
+       # (even though inventory_import.Command.handle -- which calls
+       # perform_update -- has its own lock, inventory_ID_import)
+       with advisory_lock('inventory_{}_perform_update'.format(self.inventory.id)):
+
+           try:
+               self.check_license()
+           except PermissionDenied as e:
+               self.mark_license_failure(save=True)
+               raise e
+
+           try:
+               # Check the per-org host limits
+               self.check_org_host_limit()
+           except PermissionDenied as e:
+               self.mark_org_limits_failure(save=True)
+               raise e

-       status, tb, exc = 'error', '', None
-       try:
            if settings.SQL_DEBUG:
                queries_before = len(connection.queries)

            # Update inventory update for this command line invocation.
            with ignore_inventory_computed_fields():
                # TODO: move this to before perform_update
                iu = self.inventory_update
                if iu.status != 'running':
                    with transaction.atomic():
                        self.inventory_update.status = 'running'
                        self.inventory_update.save()

-           source = self.get_source_absolute_path(self.source)
-
-           data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom,
-                                         venv_path=venv_path, verbosity=self.verbosity).load()
-
-           logger.debug('Finished loading from source: %s', source)
            logger.info('Processing JSON output...')
            inventory = MemInventory(
                group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
            inventory = dict_to_mem_data(data, inventory=inventory)

            del data  # forget dict from import, could be large

            logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
                        len(inventory.all_group.all_hosts))

@@ -1078,9 +1097,10 @@ class Command(BaseCommand):
            if settings.SQL_DEBUG:
                queries_before2 = len(connection.queries)
            self.inventory.update_computed_fields()
            if settings.SQL_DEBUG:
                logger.warning('update computed fields took %d queries',
                               len(connection.queries) - queries_before2)

            # Check if the license is valid.
            # If the license is not valid, a CommandError will be thrown,
            # and inventory update will be marked as invalid.
@@ -1091,11 +1111,11 @@ class Command(BaseCommand):
                # Check the per-org host limits
                license_fail = False
                self.check_org_host_limit()
-           except CommandError as e:
+           except PermissionDenied as e:
                if license_fail:
-                   self.mark_license_failure()
+                   self.mark_license_failure(save=True)
                else:
-                   self.mark_org_limits_failure()
+                   self.mark_org_limits_failure(save=True)
                raise e

            if settings.SQL_DEBUG:
@@ -1104,7 +1124,6 @@ class Command(BaseCommand):
            else:
                logger.info('Inventory import completed for %s in %0.1fs',
                            self.inventory_source.name, time.time() - begin)
-               status = 'successful'

            # If we're in debug mode, then log the queries and time
            # used to do the operation.
@@ -1114,29 +1133,3 @@ class Command(BaseCommand):
                logger.warning('Inventory import required %d queries '
                               'taking %0.3fs', len(queries_this_import),
                               sqltime)
-       except Exception as e:
-           if isinstance(e, KeyboardInterrupt):
-               status = 'canceled'
-               exc = e
-           elif isinstance(e, CommandError):
-               exc = e
-           else:
-               tb = traceback.format_exc()
-               exc = e
-
-       if not self.invoked_from_dispatcher:
-           with ignore_inventory_computed_fields():
-               self.inventory_update = InventoryUpdate.objects.get(pk=self.inventory_update.pk)
-               self.inventory_update.result_traceback = tb
-               self.inventory_update.status = status
-               self.inventory_update.save(update_fields=['status', 'result_traceback'])
-               self.inventory_source.status = status
-               self.inventory_source.save(update_fields=['status'])
-
-       if exc:
-           logger.error(str(exc))
-
-       if exc:
-           if isinstance(exc, CommandError):
-               sys.exit(1)
-           raise exc

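An illustrative sketch of the error contract at the end of handle() above: CommandError means a CLI usage or validation failure and maps to exit status 1, while anything else is re-raised so the dispatcher can record status and traceback. This mirrors the excerpt rather than adding behavior:

import sys
from django.core.management.base import CommandError

def finish(exc):
    if exc:
        if isinstance(exc, CommandError):
            sys.exit(1)  # CLI semantics: bad invocation, clean non-zero exit
        raise exc        # task-system semantics: propagate for traceback capture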
@@ -19,7 +19,9 @@ class Command(BaseCommand):
        profile_sql.delay(
            threshold=options['threshold'], minutes=options['minutes']
        )
-       print(f"Logging initiated with a threshold of {options['threshold']} second(s) and a duration of"
-             f" {options['minutes']} minute(s), any queries that meet criteria can"
-             f" be found in /var/log/tower/profile/."
-             )
+       if options['threshold'] > 0:
+           print(f"SQL profiling initiated with a threshold of {options['threshold']} second(s) and a"
+                 f" duration of {options['minutes']} minute(s), any queries that meet criteria can"
+                 f" be found in /var/log/tower/profile/.")
+       else:
+           print("SQL profiling disabled.")

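A hedged usage sketch: the broadcast task invoked above can also be fired directly (the profile_sql task itself is defined further down in tasks.py, where threshold <= 0 disables profiling on every node):

from awx.main.tasks import profile_sql

profile_sql.delay(threshold=2, minutes=5)  # profile queries slower than 2s for 5 minutes
profile_sql.delay(threshold=0)             # disable profiling everywhere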
@@ -1,13 +1,9 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

-import uuid
import logging
import threading
import time
-import cProfile
-import pstats
-import os
import urllib.parse

from django.conf import settings
@@ -22,6 +18,7 @@ from django.urls import reverse, resolve

from awx.main.utils.named_url_graph import generate_graph, GraphNode
from awx.conf import fields, register
+from awx.main.utils.profiling import AWXProfiler


logger = logging.getLogger('awx.main.middleware')
@@ -32,11 +29,14 @@ class TimingMiddleware(threading.local, MiddlewareMixin):

    dest = '/var/log/tower/profile'

+   def __init__(self, *args, **kwargs):
+       super().__init__(*args, **kwargs)
+       self.prof = AWXProfiler("TimingMiddleware")
+
    def process_request(self, request):
        self.start_time = time.time()
        if settings.AWX_REQUEST_PROFILE:
-           self.prof = cProfile.Profile()
-           self.prof.enable()
+           self.prof.start()

    def process_response(self, request, response):
        if not hasattr(self, 'start_time'):  # some tools may not invoke process_request
@@ -44,33 +44,10 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
        total_time = time.time() - self.start_time
        response['X-API-Total-Time'] = '%0.3fs' % total_time
        if settings.AWX_REQUEST_PROFILE:
-           self.prof.disable()
-           cprofile_file = self.save_profile_file(request)
-           response['cprofile_file'] = cprofile_file
+           response['X-API-Profile-File'] = self.prof.stop()
        perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
        return response

-   def save_profile_file(self, request):
-       if not os.path.isdir(self.dest):
-           os.makedirs(self.dest)
-       filename = '%.3fs-%s.pstats' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
-       filepath = os.path.join(self.dest, filename)
-       with open(filepath, 'w') as f:
-           f.write('%s %s\n' % (request.method, request.get_full_path()))
-           pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
-
-       if settings.AWX_REQUEST_PROFILE_WITH_DOT:
-           from gprof2dot import main as generate_dot
-           raw = os.path.join(self.dest, filename) + '.raw'
-           pstats.Stats(self.prof).dump_stats(raw)
-           generate_dot([
-               '-n', '2.5', '-f', 'pstats', '-o',
-               os.path.join(self.dest, filename).replace('.pstats', '.dot'),
-               raw
-           ])
-           os.remove(raw)
-       return filepath
-

class SessionTimeoutMiddleware(MiddlewareMixin):
    """
@@ -204,4 +181,4 @@ class MigrationRanCheckMiddleware(MiddlewareMixin):
        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
        if bool(plan) and \
                getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
-           return redirect(reverse("ui:migrations_notran"))
+           return redirect(reverse("ui_next:migrations_notran"))

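A small sketch of what TimingMiddleware adds to every API response after this change; with AWX_REQUEST_PROFILE enabled, X-API-Profile-File points at the pstats output produced by AWXProfiler.stop(). The host and endpoint are illustrative assumptions:

import requests

resp = requests.get('https://awx.example.org/api/v2/ping/')
print(resp.headers.get('X-API-Total-Time'))    # e.g. "0.052s"
print(resp.headers.get('X-API-Profile-File'))  # set only while profiling is enabled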
@@ -1,11 +1,7 @@
# Generated by Django 2.2.11 on 2020-05-01 13:25

from django.db import migrations, models
-from awx.main.migrations._inventory_source import create_scm_script_substitute
-
-
-def convert_cloudforms_to_scm(apps, schema_editor):
-    create_scm_script_substitute(apps, 'cloudforms')
+from awx.main.migrations._inventory_source import delete_cloudforms_inv_source


class Migration(migrations.Migration):
@@ -15,7 +11,7 @@ class Migration(migrations.Migration):
    ]

    operations = [
-       migrations.RunPython(convert_cloudforms_to_scm),
+       migrations.RunPython(delete_cloudforms_inv_source),
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
awx/main/migrations/0122_really_remove_cloudforms_inventory.py (new file)
@@ -0,0 +1,13 @@
from django.db import migrations
from awx.main.migrations._inventory_source import delete_cloudforms_inv_source


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0121_delete_toweranalyticsstate'),
    ]

    operations = [
        migrations.RunPython(delete_cloudforms_inv_source),
    ]

awx/main/migrations/0123_drop_hg_support.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from django.db import migrations, models
from awx.main.migrations._hg_removal import delete_hg_scm


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0122_really_remove_cloudforms_inventory'),
    ]

    operations = [
        migrations.RunPython(delete_hg_scm),
        migrations.AlterField(
            model_name='project',
            name='scm_type',
            field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
        ),
        migrations.AlterField(
            model_name='projectupdate',
            name='scm_type',
            field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
        ),
    ]

awx/main/migrations/_hg_removal.py (new file, 19 lines)
@@ -0,0 +1,19 @@
import logging

from awx.main.utils.common import set_current_apps

logger = logging.getLogger('awx.main.migrations')


def delete_hg_scm(apps, schema_editor):
    set_current_apps(apps)
    Project = apps.get_model('main', 'Project')
    ProjectUpdate = apps.get_model('main', 'ProjectUpdate')

    ProjectUpdate.objects.filter(project__scm_type='hg').update(scm_type='')
    update_ct = Project.objects.filter(scm_type='hg').update(scm_type='')

    if update_ct:
        logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(
            update_ct
        ))

@@ -5,6 +5,7 @@ from uuid import uuid4
from django.utils.encoding import smart_text
from django.utils.timezone import now

from awx.main.utils.common import set_current_apps
+from awx.main.utils.common import parse_yaml_or_json

logger = logging.getLogger('awx.main.migrations')
@@ -91,43 +92,14 @@ def back_out_new_instance_id(apps, source, new_id):
    ))


-def create_scm_script_substitute(apps, source):
-    """Only applies for cloudforms in practice, but written generally.
-    Given a source type, this will replace all inventory sources of that type
-    with SCM inventory sources that source the script from Ansible core
-    """
-    # the revision in the Ansible 2.9 stable branch this project will start out as
-    # it can still be updated manually later (but staying within 2.9 branch), if desired
-    ansible_rev = '6f83b9aff42331e15c55a171de0a8b001208c18c'
+def delete_cloudforms_inv_source(apps, schema_editor):
    set_current_apps(apps)
    InventorySource = apps.get_model('main', 'InventorySource')
-   ContentType = apps.get_model('contenttypes', 'ContentType')
-   Project = apps.get_model('main', 'Project')
-   if not InventorySource.objects.filter(source=source).exists():
-       logger.debug('No sources of type {} to migrate'.format(source))
-       return
-   proj_name = 'Replacement project for {} type sources - {}'.format(source, uuid4())
-   right_now = now()
-   project = Project.objects.create(
-       name=proj_name,
-       created=right_now,
-       modified=right_now,
-       description='Created by migration',
-       polymorphic_ctype=ContentType.objects.get(model='project'),
-       # project-specific fields
-       scm_type='git',
-       scm_url='https://github.com/ansible/ansible.git',
-       scm_branch='stable-2.9',
-       scm_revision=ansible_rev
-   )
-   ct = 0
-   for inv_src in InventorySource.objects.filter(source=source).iterator():
-       inv_src.source = 'scm'
-       inv_src.source_project = project
-       inv_src.source_path = 'contrib/inventory/{}.py'.format(source)
-       inv_src.scm_last_revision = ansible_rev
-       inv_src.save(update_fields=['source', 'source_project', 'source_path', 'scm_last_revision'])
-       logger.debug('Changed inventory source {} to scm type'.format(inv_src.pk))
-       ct += 1
+   InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
+   CredentialType = apps.get_model('main', 'CredentialType')
+   InventoryUpdate.objects.filter(inventory_source__source='cloudforms').delete()
+   InventorySource.objects.filter(source='cloudforms').delete()
+   ct = CredentialType.objects.filter(namespace='cloudforms').first()
    if ct:
-       logger.info('Changed total of {} inventory sources from {} type to scm'.format(ct, source))
+       ct.credentials.all().delete()
+       ct.delete()

@@ -1,9 +1,13 @@
import json
import re
+import logging

from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import iri_to_uri


FrozenInjectors = dict()
+logger = logging.getLogger('awx.main.migrations')


class PluginFileInjector(object):
@@ -129,6 +133,7 @@ class azure_rm(PluginFileInjector):
        ret['exclude_host_filters'].append("location not in {}".format(repr(python_regions)))
        return ret


class ec2(PluginFileInjector):
    plugin_name = 'aws_ec2'
    namespace = 'amazon'
@@ -586,6 +591,7 @@ class openstack(PluginFileInjector):
        ret['inventory_hostname'] = use_host_name_for_name(source_vars['use_hostnames'])
        return ret


class rhv(PluginFileInjector):
    """ovirt uses the custom credential templating, and that is all
    """

@@ -81,10 +81,17 @@ User.add_to_class('accessible_objects', user_accessible_objects)


def enforce_bigint_pk_migration():
+   #
+   # NOTE: this function is not actually in use anymore,
+   # but has been intentionally kept for historical purposes,
+   # and to serve as an illustration if we ever need to perform
+   # bulk modification/migration of event data in the future.
+   #
    # see: https://github.com/ansible/awx/issues/6010
    # look at all the event tables and verify that they have been fully migrated
    # from the *old* int primary key table to the replacement bigint table
    # if not, attempt to migrate them in the background
    #
    for tblname in (
        'main_jobevent', 'main_inventoryupdateevent',
        'main_projectupdateevent', 'main_adhoccommandevent',

@@ -819,6 +819,11 @@ ManagedCredentialType(
                                   'It is only needed for Keystone v3 authentication '
                                   'URLs. Refer to Ansible Tower documentation for '
                                   'common scenarios.')
        }, {
+           'id': 'region',
+           'label': ugettext_noop('Region Name'),
+           'type': 'string',
+           'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'),
+       }, {
            'id': 'verify_ssl',
            'label': ugettext_noop('Verify SSL'),
@@ -881,33 +886,6 @@ ManagedCredentialType(
    }
)

-ManagedCredentialType(
-    namespace='cloudforms',
-    kind='cloud',
-    name=ugettext_noop('Red Hat CloudForms'),
-    managed_by_tower=True,
-    inputs={
-        'fields': [{
-            'id': 'host',
-            'label': ugettext_noop('CloudForms URL'),
-            'type': 'string',
-            'help_text': ugettext_noop('Enter the URL for the virtual machine that '
-                                       'corresponds to your CloudForms instance. '
-                                       'For example, https://cloudforms.example.org')
-        }, {
-            'id': 'username',
-            'label': ugettext_noop('Username'),
-            'type': 'string'
-        }, {
-            'id': 'password',
-            'label': ugettext_noop('Password'),
-            'type': 'string',
-            'secret': True,
-        }],
-        'required': ['host', 'username', 'password'],
-    }
-)
-
ManagedCredentialType(
    namespace='gce',
    kind='cloud',

@@ -82,6 +82,7 @@ def _openstack_data(cred):
    if cred.has_input('domain'):
        openstack_auth['domain_name'] = cred.get_input('domain', default='')
+   verify_state = cred.get_input('verify_ssl', default=True)

    openstack_data = {
        'clouds': {
            'devstack': {
@@ -90,6 +91,10 @@ def _openstack_data(cred):
            },
        },
    }
+
+   if cred.has_input('project_region_name'):
+       openstack_data['clouds']['devstack']['region_name'] = cred.get_input('project_region_name', default='')
+
    return openstack_data

@@ -261,18 +261,20 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
        app_label = 'main'

-   def fit_task_to_most_remaining_capacity_instance(self, task):
+   @staticmethod
+   def fit_task_to_most_remaining_capacity_instance(task, instances):
        instance_most_capacity = None
-       for i in self.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'):
+       for i in instances:
            if i.remaining_capacity >= task.task_impact and \
                    (instance_most_capacity is None or
                     i.remaining_capacity > instance_most_capacity.remaining_capacity):
                instance_most_capacity = i
        return instance_most_capacity

-   def find_largest_idle_instance(self):
+   @staticmethod
+   def find_largest_idle_instance(instances):
        largest_instance = None
-       for i in self.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'):
+       for i in instances:
            if i.jobs_running == 0:
                if largest_instance is None:
                    largest_instance = i

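A hedged sketch of why these methods became static: any objects exposing remaining_capacity, jobs_running, and hostname can now be passed in, which is what lets the task manager (later in this changeset) hand in SimpleNamespace snapshots instead of model rows. The import path is assumed:

from types import SimpleNamespace
from awx.main.models.ha import InstanceGroup  # assumed module location

task = SimpleNamespace(task_impact=20)
instances = [SimpleNamespace(hostname='node1', remaining_capacity=15, jobs_running=1),
             SimpleNamespace(hostname='node2', remaining_capacity=40, jobs_running=0)]
best = InstanceGroup.fit_task_to_most_remaining_capacity_instance(task, instances)
print(best.hostname)  # node2: the most remaining capacity that still fits the task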
@@ -798,6 +798,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
        if self.project:
            for name in ('awx', 'tower'):
                r['{}_project_revision'.format(name)] = self.project.scm_revision
+               r['{}_project_scm_branch'.format(name)] = self.project.scm_branch
+       if self.scm_branch:
+           for name in ('awx', 'tower'):
+               r['{}_job_scm_branch'.format(name)] = self.scm_branch
        if self.job_template:
            for name in ('awx', 'tower'):
                r['{}_job_template_id'.format(name)] = self.job_template.pk

@@ -12,7 +12,7 @@ from django.core.mail.message import EmailMessage
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str, force_text
-from jinja2 import sandbox
+from jinja2 import sandbox, ChainableUndefined
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

# AWX
@@ -357,7 +357,7 @@ class JobNotificationMixin(object):
    'url': 'https://towerhost/#/jobs/playbook/1010',
    'approval_status': 'approved',
    'approval_node_name': 'Approve Me',
-   'workflow_url': 'https://towerhost/#/workflows/1010',
+   'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
    'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
                        'traceback': '',
                        'status': 'running',
@@ -393,7 +393,11 @@ class JobNotificationMixin(object):
            'job': job_context,
            'job_friendly_name': self.get_notification_friendly_name(),
            'url': self.get_ui_url(),
-           'job_metadata': json.dumps(self.notification_data(), indent=4)
+           'job_metadata': json.dumps(
+               self.notification_data(),
+               ensure_ascii=False,
+               indent=4
+           )
        }

    def build_context(node, fields, allowed_fields):
@@ -425,7 +429,7 @@ class JobNotificationMixin(object):
        raise RuntimeError("Define me")

    def build_notification_message(self, nt, status):
-       env = sandbox.ImmutableSandboxedEnvironment()
+       env = sandbox.ImmutableSandboxedEnvironment(undefined=ChainableUndefined)

        from awx.api.serializers import UnifiedJobSerializer
        job_serialization = UnifiedJobSerializer(self).to_representation(self)

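Why ChainableUndefined matters for notification templates: chained attribute access on a missing value stays undefined instead of raising, so a template like the one below renders a default rather than erroring out the whole notification. A self-contained demonstration:

from jinja2 import ChainableUndefined
from jinja2.sandbox import ImmutableSandboxedEnvironment

env = ImmutableSandboxedEnvironment(undefined=ChainableUndefined)
tmpl = env.from_string("{{ job.summary_fields.inventory.name | default('n/a') }}")
print(tmpl.render())  # n/a -- no UndefinedError despite 'job' being absent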
@@ -52,7 +52,6 @@ class ProjectOptions(models.Model):
    SCM_TYPE_CHOICES = [
        ('', _('Manual')),
        ('git', _('Git')),
-       ('hg', _('Mercurial')),
        ('svn', _('Subversion')),
        ('insights', _('Red Hat Insights')),
        ('archive', _('Remote Archive')),

@@ -873,7 +873,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique

        # If status changed, update the parent instance.
        if self.status != status_before:
-           self._update_parent_instance()
+           # Update parent outside of the transaction for Job w/ allow_simultaneous=True
+           # This dodges lock contention at the expense of the foreign key not being
+           # completely correct.
+           if getattr(self, 'allow_simultaneous', False):
+               connection.on_commit(self._update_parent_instance)
+           else:
+               self._update_parent_instance()

        # Done.
        return result

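A sketch of the on_commit deferral used above: the parent-instance update runs only after the surrounding transaction commits, so simultaneous jobs touching the same parent do not contend on its row lock. `job` is a hypothetical saved UnifiedJob instance, for illustration:

from django.db import connection, transaction

with transaction.atomic():
    job.status = 'successful'               # job: hypothetical UnifiedJob row
    job.save(update_fields=['status'])
    connection.on_commit(job._update_parent_instance)  # fires only post-commit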
@@ -620,7 +620,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
        return reverse('api:workflow_job_detail', kwargs={'pk': self.pk}, request=request)

    def get_ui_url(self):
-       return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.pk))
+       return urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.pk))

    def notification_data(self):
        result = super(WorkflowJob, self).notification_data()
@@ -674,7 +674,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
        return self.status == 'running'


-class WorkflowApprovalTemplate(UnifiedJobTemplate):
+class WorkflowApprovalTemplate(UnifiedJobTemplate, RelatedJobsMixin):

    FIELDS_TO_PRESERVE_AT_COPY = ['description', 'timeout',]

@@ -702,6 +702,12 @@ class WorkflowApprovalTemplate(UnifiedJobTemplate):
    def workflow_job_template(self):
        return self.workflowjobtemplatenodes.first().workflow_job_template

+   '''
+   RelatedJobsMixin
+   '''
+   def _get_related_jobs(self):
+       return UnifiedJob.objects.filter(unified_job_template=self)
+

class WorkflowApproval(UnifiedJob, JobNotificationMixin):
    class Meta:
@@ -746,7 +752,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
        return None

    def get_ui_url(self):
-       return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
+       return urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.workflow_job.id))

    def _get_parent_field_name(self):
        return 'workflow_approval_template'
@@ -776,6 +782,10 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
            self.send_approval_notification('running')
        return can_start

+   @property
+   def event_processing_finished(self):
+       return True
+
    def send_approval_notification(self, approval_status):
        from awx.main.tasks import send_notifications  # avoid circular import
        if self.workflow_job_template is None:
@@ -830,7 +840,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
        return (msg, body)

    def context(self, approval_status):
-       workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
+       workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.workflow_job.id))
        return {'approval_status': approval_status,
                'approval_node_name': self.workflow_approval_template.name,
                'workflow_url': workflow_url,

@@ -57,6 +57,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):

    def send_messages(self, messages):
        sent_messages = 0
+       self.headers['Content-Type'] = 'application/json'
        if 'User-Agent' not in self.headers:
            self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
        if self.http_method.lower() not in ['put','post']:
@@ -68,7 +69,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
            auth = (self.username, self.password)
            r = chosen_method("{}".format(m.recipients()[0]),
                              auth=auth,
-                             json=m.body,
+                             data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
                              headers=self.headers,
                              verify=(not self.disable_ssl_verification))
            if r.status_code >= 400:

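The switch above from requests' json= keyword to data=json.dumps(..., ensure_ascii=False) keeps non-ASCII notification text as literal UTF-8 bytes instead of \uXXXX escapes (hence the explicit Content-Type header). The difference in one line:

import json

body = {'msg': 'déploiement terminé'}
print(json.dumps(body))                      # {"msg": "d\u00e9ploiement termin\u00e9"}
print(json.dumps(body, ensure_ascii=False))  # {"msg": "déploiement terminé"}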
@@ -12,6 +12,24 @@ from awx.main.utils.common import parse_yaml_or_json

logger = logging.getLogger('awx.main.scheduler')


+def deepmerge(a, b):
+    """
+    Merge dict structures and return the result.
+
+    >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}}
+    >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}}
+    >>> import pprint; pprint.pprint(deepmerge(a, b))
+    {'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}}
+    """
+    if isinstance(a, dict) and isinstance(b, dict):
+        return dict([(k, deepmerge(a.get(k), b.get(k)))
+                     for k in set(a.keys()).union(b.keys())])
+    elif b is None:
+        return a
+    else:
+        return b
+
+
class PodManager(object):

    def __init__(self, task=None):
@@ -128,11 +146,13 @@ class PodManager(object):
        pod_spec = {**default_pod_spec, **pod_spec_override}

        if self.task:
-           pod_spec['metadata']['name'] = self.pod_name
-           pod_spec['metadata']['labels'] = {
-               'ansible-awx': settings.INSTALL_UUID,
-               'ansible-awx-job-id': str(self.task.id)
-           }
+           pod_spec['metadata'] = deepmerge(
+               pod_spec.get('metadata', {}),
+               dict(name=self.pod_name,
+                    labels={
+                        'ansible-awx': settings.INSTALL_UUID,
+                        'ansible-awx-job-id': str(self.task.id)
+                    }))
            pod_spec['spec']['containers'][0]['name'] = self.pod_name

        return pod_spec

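A usage sketch showing why build_pod_spec now routes through deepmerge: a user-supplied metadata override (e.g. a custom namespace or extra labels) survives while AWX's own name and job-id label are layered on top. Values are illustrative:

override = {'metadata': {'namespace': 'awx-jobs', 'labels': {'team': 'ops'}}}
merged = deepmerge(override['metadata'],
                   {'name': 'awx-job-42',
                    'labels': {'ansible-awx-job-id': '42'}})
print(merged)  # key order may vary:
# {'namespace': 'awx-jobs', 'name': 'awx-job-42',
#  'labels': {'team': 'ops', 'ansible-awx-job-id': '42'}}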
@@ -7,12 +7,14 @@ import logging
import uuid
import json
import random
+from types import SimpleNamespace

# Django
from django.db import transaction, connection
from django.utils.translation import ugettext_lazy as _, gettext_noop
from django.utils.timezone import now as tz_now
from django.conf import settings
+from django.db.models import Q

# AWX
from awx.main.dispatch.reaper import reap_job
@@ -45,6 +47,15 @@ logger = logging.getLogger('awx.main.scheduler')
class TaskManager():

    def __init__(self):
+       '''
+       Do NOT put database queries or other potentially expensive operations
+       in the task manager init. The task manager object is created every time a
+       job is created, transitions state, and every 30 seconds on each tower node.
+       More often then not, the object is destroyed quickly because the NOOP case is hit.
+
+       The NOOP case is short-circuit logic. If the task manager realizes that another instance
+       of the task manager is already running, then it short-circuits and decides not to run.
+       '''
        self.graph = dict()
        # start task limit indicates how many pending jobs can be started on this
        # .schedule() run. Starting jobs is expensive, and there is code in place to reap
@@ -52,10 +63,30 @@ class TaskManager():
        # 5 minutes to start pending jobs. If this limit is reached, pending jobs
        # will no longer be started and will be started on the next task manager cycle.
        self.start_task_limit = settings.START_TASK_LIMIT

+   def after_lock_init(self):
+       '''
+       Init AFTER we know this instance of the task manager will run because the lock is acquired.
+       '''
+       instances = Instance.objects.filter(~Q(hostname=None), capacity__gt=0, enabled=True)
+       self.real_instances = {i.hostname: i for i in instances}
+
+       instances_partial = [SimpleNamespace(obj=instance,
+                                            remaining_capacity=instance.remaining_capacity,
+                                            capacity=instance.capacity,
+                                            jobs_running=instance.jobs_running,
+                                            hostname=instance.hostname) for instance in instances]
+
+       instances_by_hostname = {i.hostname: i for i in instances_partial}
+
        for rampart_group in InstanceGroup.objects.prefetch_related('instances'):
            self.graph[rampart_group.name] = dict(graph=DependencyGraph(rampart_group.name),
                                                  capacity_total=rampart_group.capacity,
-                                                 consumed_capacity=0)
+                                                 consumed_capacity=0,
+                                                 instances=[])
+           for instance in rampart_group.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'):
+               if instance.hostname in instances_by_hostname:
+                   self.graph[rampart_group.name]['instances'].append(instances_by_hostname[instance.hostname])

    def is_job_blocked(self, task):
        # TODO: I'm not happy with this, I think blocking behavior should be decided outside of the dependency graph
@@ -254,7 +285,7 @@ class TaskManager():
        for group in InstanceGroup.objects.all():
            if group.is_containerized or group.controller_id:
                continue
-           match = group.fit_task_to_most_remaining_capacity_instance(task)
+           match = group.fit_task_to_most_remaining_capacity_instance(task, group.instances.all())
            if match:
                break
        task.instance_group = rampart_group
@@ -466,7 +497,6 @@ class TaskManager():
                continue
            preferred_instance_groups = task.preferred_instance_groups
            found_acceptable_queue = False
-           idle_instance_that_fits = None
            if isinstance(task, WorkflowJob):
                if task.unified_job_template_id in running_workflow_templates:
                    if not task.allow_simultaneous:
@@ -483,24 +513,24 @@ class TaskManager():
                    found_acceptable_queue = True
                    break

-           if idle_instance_that_fits is None:
-               idle_instance_that_fits = rampart_group.find_largest_idle_instance()
            remaining_capacity = self.get_remaining_capacity(rampart_group.name)
            if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
                logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
                    rampart_group.name, remaining_capacity))
                continue

-           execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
-           if execution_instance:
-               logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
-                   task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
-           elif not execution_instance and idle_instance_that_fits:
-               if not rampart_group.is_containerized:
-                   execution_instance = idle_instance_that_fits
+           execution_instance = InstanceGroup.fit_task_to_most_remaining_capacity_instance(task, self.graph[rampart_group.name]['instances']) or \
+               InstanceGroup.find_largest_idle_instance(self.graph[rampart_group.name]['instances'])

+           if execution_instance or rampart_group.is_containerized:
+               execution_instance.remaining_capacity = max(0, execution_instance.remaining_capacity - task.task_impact)
+               execution_instance.jobs_running += 1
                logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                    task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
-           if execution_instance or rampart_group.is_containerized:
+               if execution_instance:
+                   execution_instance = self.real_instances[execution_instance.hostname]
                self.graph[rampart_group.name]['graph'].add_job(task)
                self.start_task(task, rampart_group, task.get_jobs_fail_chain(), execution_instance)
                found_acceptable_queue = True
@@ -572,6 +602,9 @@ class TaskManager():
    def _schedule(self):
        finished_wfjs = []
        all_sorted_tasks = self.get_tasks()

+       self.after_lock_init()
+
        if len(all_sorted_tasks) > 0:
            # TODO: Deal with
            # latest_project_updates = self.get_latest_project_update_tasks(all_sorted_tasks)

@@ -121,6 +121,27 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
        Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).members.remove(instance)


+def sync_rbac_to_superuser_status(instance, sender, **kwargs):
+    'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
+    if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
+        new_status_value = bool(kwargs['action'] == 'post_add')
+        if hasattr(instance, 'singleton_name'):  # duck typing, role.members.add() vs user.roles.add()
+            role = instance
+            if role.singleton_name == ROLE_SINGLETON_SYSTEM_ADMINISTRATOR:
+                if kwargs['pk_set']:
+                    kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).update(is_superuser=new_status_value)
+                elif kwargs['action'] == 'post_clear':
+                    kwargs['model'].objects.all().update(is_superuser=False)
+        else:
+            user = instance
+            if kwargs['action'] == 'post_clear':
+                user.is_superuser = False
+                user.save(update_fields=['is_superuser'])
+            elif kwargs['model'].objects.filter(pk__in=kwargs['pk_set'], singleton_name=ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).exists():
+                user.is_superuser = new_status_value
+                user.save(update_fields=['is_superuser'])
+
+
def rbac_activity_stream(instance, sender, **kwargs):
    # Only if we are associating/disassociating
    if kwargs['action'] in ['pre_add', 'pre_remove']:
@@ -197,6 +218,7 @@ m2m_changed.connect(rebuild_role_ancestor_list, Role.parents.through)
m2m_changed.connect(rbac_activity_stream, Role.members.through)
m2m_changed.connect(rbac_activity_stream, Role.parents.through)
post_save.connect(sync_superuser_status_to_rbac, sender=User)
+m2m_changed.connect(sync_rbac_to_superuser_status, Role.members.through)
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJob)
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJobTemplate)

|
||||
@@ -23,7 +23,6 @@ import fcntl
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
import urllib.parse as urlparse
|
||||
import shlex
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -61,10 +60,10 @@ from awx.main.models import (
|
||||
Inventory, InventorySource, SmartInventoryMembership,
|
||||
Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob,
|
||||
JobEvent, ProjectUpdateEvent, InventoryUpdateEvent, AdHocCommandEvent, SystemJobEvent,
|
||||
build_safe_env, enforce_bigint_pk_migration
|
||||
build_safe_env
|
||||
)
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
from awx.main.exceptions import AwxTaskError
|
||||
from awx.main.exceptions import AwxTaskError, PostRunError
|
||||
from awx.main.queue import CallbackQueueDispatcher
|
||||
from awx.main.isolated import manager as isolated_manager
|
||||
from awx.main.dispatch.publish import task
|
||||
@@ -79,6 +78,7 @@ from awx.main.utils.external_logging import reconfigure_rsyslog
|
||||
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
|
||||
from awx.main.utils.reload import stop_local_services
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
from awx.main.utils.handlers import SpecialInventoryHandler
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
from awx.main import analytics
|
||||
from awx.conf import settings_registry
|
||||
@@ -138,12 +138,6 @@ def dispatch_startup():
|
||||
if Instance.objects.me().is_controller():
|
||||
awx_isolated_heartbeat()
|
||||
|
||||
# at process startup, detect the need to migrate old event records from int
|
||||
# to bigint; at *some point* in the future, once certain versions of AWX
|
||||
# and Tower fall out of use/support, we can probably just _assume_ that
|
||||
# everybody has moved to bigint, and remove this code entirely
|
||||
enforce_bigint_pk_migration()
|
||||
|
||||
# Update Tower's rsyslog.conf file based on loggins settings in the db
|
||||
reconfigure_rsyslog()
|
||||
|
||||
@@ -313,7 +307,7 @@ def delete_project_files(project_path):
|
||||
|
||||
@task(queue='tower_broadcast_all')
|
||||
def profile_sql(threshold=1, minutes=1):
|
||||
if threshold == 0:
|
||||
if threshold <= 0:
|
||||
cache.delete('awx-profile-sql-threshold')
|
||||
logger.error('SQL PROFILING DISABLED')
|
||||
else:
|
||||
@@ -378,6 +372,7 @@ def gather_analytics():
|
||||
|
||||
from awx.conf.models import Setting
|
||||
from rest_framework.fields import DateTimeField
|
||||
from awx.main.signals import disable_activity_stream
|
||||
if not settings.INSIGHTS_TRACKING_STATE:
|
||||
return
|
||||
if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
|
||||
@@ -414,7 +409,8 @@ def gather_analytics():
|
||||
if not _gather_and_ship(incremental_collectors, since=start, until=until):
|
||||
break
|
||||
start = until
|
||||
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
|
||||
with disable_activity_stream():
|
||||
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
|
||||
if subset:
|
||||
_gather_and_ship(subset, since=since, until=gather_time)
|
||||
|
||||
@@ -736,6 +732,12 @@ def update_host_smart_inventory_memberships():
|
||||
|
||||
@task(queue=get_local_queuename)
|
||||
def migrate_legacy_event_data(tblname):
|
||||
#
|
||||
# NOTE: this function is not actually in use anymore,
|
||||
# but has been intentionally kept for historical purposes,
|
||||
# and to serve as an illustration if we ever need to perform
|
||||
# bulk modification/migration of event data in the future.
|
||||
#
|
||||
if 'event' not in tblname:
|
||||
return
|
||||
with advisory_lock(f'bigint_migration_{tblname}', wait=False) as acquired:
|
||||
@@ -1225,6 +1227,13 @@ class BaseTask(object):
|
||||
Ansible runner puts a parent_uuid on each event, no matter what the type.
|
||||
AWX only saves the parent_uuid if the event is for a Job.
|
||||
'''
|
||||
# cache end_line locally for RunInventoryUpdate tasks
|
||||
# which generate job events from two 'streams':
|
||||
# ansible-inventory and the awx.main.commands.inventory_import
|
||||
# logger
|
||||
if isinstance(self, RunInventoryUpdate):
|
||||
self.end_line = event_data['end_line']
|
||||
|
||||
if event_data.get(self.event_data_key, None):
|
||||
if self.event_data_key != 'job_id':
|
||||
event_data.pop('parent_uuid', None)
|
||||
@@ -1253,7 +1262,7 @@ class BaseTask(object):
|
||||
# so it *should* have a negligible performance impact
|
||||
task = event_data.get('event_data', {}).get('task_action')
|
||||
try:
|
||||
if task in ('git', 'hg', 'svn'):
|
||||
if task in ('git', 'svn'):
|
||||
event_data_json = json.dumps(event_data)
|
||||
event_data_json = UriCleaner.remove_sensitive(event_data_json)
|
||||
event_data = json.loads(event_data_json)
|
||||
@@ -1521,6 +1530,12 @@ class BaseTask(object):
|
||||
|
||||
try:
|
||||
self.post_run_hook(self.instance, status)
|
||||
except PostRunError as exc:
|
||||
if status == 'successful':
|
||||
status = exc.status
|
||||
extra_update_fields['job_explanation'] = exc.args[0]
|
||||
if exc.tb:
|
||||
extra_update_fields['result_traceback'] = exc.tb
|
||||
except Exception:
|
||||
logger.exception('{} Post run hook errored.'.format(self.instance.log_format))
|
||||
|
||||
@@ -2141,7 +2156,7 @@ class RunProjectUpdate(BaseTask):
|
||||
elif not scm_branch:
|
||||
raise RuntimeError('Could not determine a revision to run from project.')
|
||||
elif not scm_branch:
|
||||
scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
|
||||
scm_branch = 'HEAD'
|
||||
|
||||
galaxy_creds_are_defined = (
|
||||
project_update.project.organization and
|
||||
@@ -2150,7 +2165,7 @@ class RunProjectUpdate(BaseTask):
|
||||
if not galaxy_creds_are_defined and (
|
||||
settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED
|
||||
):
|
||||
logger.debug(
|
||||
logger.warning(
|
||||
'Galaxy role/collection syncing is enabled, but no '
|
||||
f'credentials are configured for {project_update.project.organization}.'
|
||||
)
|
||||
@@ -2160,7 +2175,7 @@ class RunProjectUpdate(BaseTask):
|
||||
'local_path': os.path.basename(project_update.project.local_path),
|
||||
'project_path': project_update.get_project_path(check_if_exists=False), # deprecated
|
||||
'insights_url': settings.INSIGHTS_URL_BASE,
|
||||
'awx_license_type': get_license(show_key=False).get('license_type', 'UNLICENSED'),
|
||||
'awx_license_type': get_license().get('license_type', 'UNLICENSED'),
|
||||
'awx_version': get_awx_version(),
|
||||
'scm_url': scm_url,
|
||||
'scm_branch': scm_branch,
|
||||
@@ -2417,9 +2432,10 @@ class RunProjectUpdate(BaseTask):
                 shutil.rmtree(stage_path)  # cannot trust content update produced
 
         if self.job_private_data_dir:
-            # copy project folder before resetting to default branch
-            # because some git-tree-specific resources (like submodules) might matter
-            self.make_local_copy(instance, self.job_private_data_dir)
+            if status == 'successful':
+                # copy project folder before resetting to default branch
+                # because some git-tree-specific resources (like submodules) might matter
+                self.make_local_copy(instance, self.job_private_data_dir)
         if self.original_branch:
             # for git project syncs, non-default branches can be problems
             # restore to branch the repo was on before this run
@@ -2461,6 +2477,14 @@ class RunInventoryUpdate(BaseTask):
     event_model = InventoryUpdateEvent
     event_data_key = 'inventory_update_id'
 
+    # TODO: remove once inv updates run in containers
+    def should_use_proot(self, inventory_update):
+        '''
+        Return whether this task should use proot.
+        '''
+        return getattr(settings, 'AWX_PROOT_ENABLED', False)
+
+    # TODO: remove once inv updates run in containers
     @property
     def proot_show_paths(self):
         return [settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
@@ -2485,15 +2509,11 @@ class RunInventoryUpdate(BaseTask):
         return injector.build_private_data(inventory_update, private_data_dir)
 
     def build_env(self, inventory_update, private_data_dir, isolated, private_data_files=None):
-        """Build environment dictionary for inventory import.
+        """Build environment dictionary for ansible-inventory.
 
-        This used to be the mechanism by which any data that needs to be passed
-        to the inventory update script is set up. In particular, this is how
-        inventory update is aware of its proper credentials.
-
-        Most environment injection is now accomplished by the credential
-        injectors. The primary purpose this still serves is to
-        still point to the inventory update INI or config file.
+        Most environment variables related to credentials or configuration
+        are accomplished by the inventory source injectors (in this method)
+        or custom credential type injectors (in main run method).
         """
         env = super(RunInventoryUpdate, self).build_env(inventory_update,
                                                         private_data_dir,
@@ -2501,8 +2521,11 @@ class RunInventoryUpdate(BaseTask):
                                                         private_data_files=private_data_files)
         if private_data_files is None:
             private_data_files = {}
         self.add_awx_venv(env)
-        # Pass inventory source ID to inventory script.
+        # TODO: remove once containers replace custom venvs
+        self.add_ansible_venv(inventory_update.ansible_virtualenv_path, env, isolated=isolated)
+
+        # Legacy environment variables, were used as signal to awx-manage command
+        # now they are provided in case some scripts may be relying on them
         env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
         env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)
         env.update(STANDARD_INVENTORY_UPDATE_ENV)
@@ -2565,47 +2588,25 @@ class RunInventoryUpdate(BaseTask):
         if inventory is None:
             raise RuntimeError('Inventory Source is not associated with an Inventory.')
 
-        # Piece together the initial command to run via. the shell.
-        args = ['awx-manage', 'inventory_import']
-        args.extend(['--inventory-id', str(inventory.pk)])
+        args = ['ansible-inventory', '--list', '--export']
 
-        # Add appropriate arguments for overwrite if the inventory_update
-        # object calls for it.
-        if inventory_update.overwrite:
-            args.append('--overwrite')
-        if inventory_update.overwrite_vars:
-            args.append('--overwrite-vars')
+        # Add arguments for the source inventory file/script/thing
+        source_location = self.pseudo_build_inventory(inventory_update, private_data_dir)
+        args.append('-i')
+        args.append(source_location)
 
-        # Declare the virtualenv the management command should activate
-        # as it calls ansible-inventory
-        args.extend(['--venv', inventory_update.ansible_virtualenv_path])
+        args.append('--output')
+        args.append(os.path.join(private_data_dir, 'artifacts', 'output.json'))
 
-        src = inventory_update.source
-        if inventory_update.enabled_var:
-            args.extend(['--enabled-var', shlex.quote(inventory_update.enabled_var)])
-            args.extend(['--enabled-value', shlex.quote(inventory_update.enabled_value)])
-        if getattr(settings, '%s_ENABLED_VAR' % src.upper(), False):
-            args.extend(['--enabled-var',
-                         getattr(settings, '%s_ENABLED_VAR' % src.upper())])
-        if getattr(settings, '%s_ENABLED_VALUE' % src.upper(), False):
-            args.extend(['--enabled-value',
-                         getattr(settings, '%s_ENABLED_VALUE' % src.upper())])
-        if inventory_update.host_filter:
-            args.extend(['--host-filter', shlex.quote(inventory_update.host_filter)])
-        if getattr(settings, '%s_EXCLUDE_EMPTY_GROUPS' % src.upper()):
-            args.append('--exclude-empty-groups')
-        if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False):
-            args.extend(['--instance-id-var',
-                         "'{}'".format(getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())),])
-        # Add arguments for the source inventory script
-        args.append('--source')
-        args.append(self.pseudo_build_inventory(inventory_update, private_data_dir))
-        if src == 'custom':
-            args.append("--custom")
-        args.append('-v%d' % inventory_update.verbosity)
-        if settings.DEBUG:
-            args.append('--traceback')
+        if os.path.isdir(source_location):
+            playbook_dir = source_location
+        else:
+            playbook_dir = os.path.dirname(source_location)
+        args.extend(['--playbook-dir', playbook_dir])
 
+        if inventory_update.verbosity:
+            args.append('-' + 'v' * min(5, inventory_update.verbosity * 2 + 1))
 
         return args
 
     def build_inventory(self, inventory_update, private_data_dir):
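For illustration, a minimal sketch of the kind of command line the new build_args assembles; every value below is invented for the example (verbosity 1, a hypothetical private data dir):

    # Hypothetical result of build_args for an SCM inventory source
    args = ['ansible-inventory', '--list', '--export',
            '-i', '/tmp/awx_42_xyz/project/inventory/hosts.yml',
            '--output', '/tmp/awx_42_xyz/artifacts/output.json',
            '--playbook-dir', '/tmp/awx_42_xyz/project/inventory',
            '-vvv']  # verbosity 1 maps to '-' + 'v' * min(5, 1 * 2 + 1)
    print(' '.join(args))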
@@ -2645,11 +2646,9 @@ class RunInventoryUpdate(BaseTask):
 
     def build_cwd(self, inventory_update, private_data_dir):
         '''
-        There are two cases where the inventory "source" is in a different
+        There is one case where the inventory "source" is in a different
         location from the private data:
-         - deprecated vendored inventory scripts in awx/plugins/inventory
          - SCM, where source needs to live in the project folder
         in these cases, the inventory does not exist in the standard tempdir
         '''
         src = inventory_update.source
         if src == 'scm' and inventory_update.source_project_update:
@@ -2707,6 +2706,75 @@ class RunInventoryUpdate(BaseTask):
             # This follows update, not sync, so make copy here
             RunProjectUpdate.make_local_copy(source_project, private_data_dir)
 
+    def post_run_hook(self, inventory_update, status):
+        if status != 'successful':
+            return  # nothing to save, step out of the way to allow error reporting
+
+        private_data_dir = inventory_update.job_env['AWX_PRIVATE_DATA_DIR']
+        expected_output = os.path.join(private_data_dir, 'artifacts', 'output.json')
+        with open(expected_output) as f:
+            data = json.load(f)
+
+        # build inventory save options
+        options = dict(
+            overwrite=inventory_update.overwrite,
+            overwrite_vars=inventory_update.overwrite_vars,
+        )
+        src = inventory_update.source
+
+        if inventory_update.enabled_var:
+            options['enabled_var'] = inventory_update.enabled_var
+            options['enabled_value'] = inventory_update.enabled_value
+        else:
+            if getattr(settings, '%s_ENABLED_VAR' % src.upper(), False):
+                options['enabled_var'] = getattr(settings, '%s_ENABLED_VAR' % src.upper())
+            if getattr(settings, '%s_ENABLED_VALUE' % src.upper(), False):
+                options['enabled_value'] = getattr(settings, '%s_ENABLED_VALUE' % src.upper())
+
+        if inventory_update.host_filter:
+            options['host_filter'] = inventory_update.host_filter
+
+        if getattr(settings, '%s_EXCLUDE_EMPTY_GROUPS' % src.upper()):
+            options['exclude_empty_groups'] = True
+        if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False):
+            options['instance_id_var'] = getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())
+
+        # Verbosity is applied to saving process, as well as ansible-inventory CLI option
+        if inventory_update.verbosity:
+            options['verbosity'] = inventory_update.verbosity
+
+        handler = SpecialInventoryHandler(
+            self.event_handler, self.cancel_callback,
+            verbosity=inventory_update.verbosity,
+            job_timeout=self.get_instance_timeout(self.instance),
+            start_time=inventory_update.started,
+            counter=self.event_ct, initial_line=self.end_line
+        )
+        inv_logger = logging.getLogger('awx.main.commands.inventory_import')
+        formatter = inv_logger.handlers[0].formatter
+        formatter.job_start = inventory_update.started
+        handler.formatter = formatter
+        inv_logger.handlers[0] = handler
+
+        from awx.main.management.commands.inventory_import import Command as InventoryImportCommand
+        cmd = InventoryImportCommand()
+        try:
+            # save the inventory data to database.
+            # canceling exceptions will be handled in the global post_run_hook
+            cmd.perform_update(options, data, inventory_update)
+        except PermissionDenied as exc:
+            logger.exception('License error saving {} content'.format(inventory_update.log_format))
+            raise PostRunError(str(exc), status='error')
+        except PostRunError:
+            logger.exception('Error saving {} content, rolling back changes'.format(inventory_update.log_format))
+            raise
+        except Exception:
+            logger.exception('Exception saving {} content, rolling back changes.'.format(
+                inventory_update.log_format))
+            raise PostRunError(
+                'Error occurred while saving inventory data, see traceback or server logs',
+                status='error', tb=traceback.format_exc())
+
 
 @task(queue=get_local_queuename)
 class RunAdHocCommand(BaseTask):
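For context, a sketch of the rough shape of the artifacts/output.json document that ansible-inventory --list --export writes and this post_run_hook feeds to perform_update; host and group names here are invented:

    # Hypothetical ansible-inventory --list --export output, loaded as `data`
    data = {
        '_meta': {'hostvars': {'host1.example.org': {'ansible_host': '10.0.0.1'}}},
        'all': {'children': ['ungrouped', 'webservers']},
        'webservers': {'hosts': ['host1.example.org'], 'vars': {'http_port': 80}},
    }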
@@ -1,34 +0,0 @@
-import pytest
-import random
-
-from awx.main.models import Project
-from awx.main.analytics import collectors
-
-
-@pytest.mark.django_db
-def test_empty():
-    assert collectors.projects_by_scm_type(None) == {
-        'manual': 0,
-        'git': 0,
-        'svn': 0,
-        'hg': 0,
-        'insights': 0,
-        'archive': 0,
-    }
-
-
-@pytest.mark.django_db
-@pytest.mark.parametrize('scm_type', [t[0] for t in Project.SCM_TYPE_CHOICES])
-def test_multiple(scm_type):
-    expected = {
-        'manual': 0,
-        'git': 0,
-        'svn': 0,
-        'hg': 0,
-        'insights': 0,
-        'archive': 0,
-    }
-    for i in range(random.randint(0, 10)):
-        Project(scm_type=scm_type).save()
-        expected[scm_type or 'manual'] += 1
-    assert collectors.projects_by_scm_type(None) == expected
@@ -675,33 +675,6 @@ def test_net_create_ok(post, organization, admin):
     assert cred.inputs['authorize'] is True
 
 
-#
-# Cloudforms Credentials
-#
-@pytest.mark.django_db
-def test_cloudforms_create_ok(post, organization, admin):
-    params = {
-        'credential_type': 1,
-        'name': 'Best credential ever',
-        'inputs': {
-            'host': 'some_host',
-            'username': 'some_username',
-            'password': 'some_password',
-        }
-    }
-    cloudforms = CredentialType.defaults['cloudforms']()
-    cloudforms.save()
-    params['organization'] = organization.id
-    response = post(reverse('api:credential_list'), params, admin)
-    assert response.status_code == 201
-
-    assert Credential.objects.count() == 1
-    cred = Credential.objects.all()[:1].get()
-    assert cred.inputs['host'] == 'some_host'
-    assert cred.inputs['username'] == 'some_username'
-    assert decrypt_field(cred, 'password') == 'some_password'
-
-
 #
 # GCE Credentials
 #
@@ -2,7 +2,7 @@ import pytest
 
 from awx.api.versioning import reverse
 
-from awx.main.models import Project
+from awx.main.models import Project, Host
 
 
 @pytest.fixture
@@ -81,6 +81,8 @@ def test_org_counts_detail_admin(resourced_organization, user, get):
     assert response.status_code == 200
 
     counts = response.data['summary_fields']['related_field_counts']
+    assert counts['hosts'] == 0
+    counts.pop('hosts')
     assert counts == COUNTS_PRIMES
 
 
@@ -93,6 +95,8 @@ def test_org_counts_detail_member(resourced_organization, user, get):
     assert response.status_code == 200
 
     counts = response.data['summary_fields']['related_field_counts']
+    assert counts['hosts'] == 0
+    counts.pop('hosts')
     assert counts == {
         'users': COUNTS_PRIMES['users'],  # Policy is that members can see other users and admins
         'admins': COUNTS_PRIMES['admins'],
@@ -111,6 +115,7 @@ def test_org_counts_list_admin(resourced_organization, user, get):
     assert response.status_code == 200
 
     counts = response.data['results'][0]['summary_fields']['related_field_counts']
+    assert 'hosts' not in counts  # doesn't show in list view
     assert counts == COUNTS_PRIMES
 
 
@@ -123,6 +128,7 @@ def test_org_counts_list_member(resourced_organization, user, get):
     assert response.status_code == 200
 
     counts = response.data['results'][0]['summary_fields']['related_field_counts']
+    assert 'hosts' not in counts  # doesn't show in list view
 
     assert counts == {
         'users': COUNTS_PRIMES['users'],  # Policy is that members can see other users and admins
@@ -145,6 +151,7 @@ def test_new_org_zero_counts(user, post):
 
     new_org_list = post_response.render().data
     counts_dict = new_org_list['summary_fields']['related_field_counts']
+    assert 'hosts' not in counts_dict  # doesn't show in list view
     assert counts_dict == COUNTS_ZEROS
 
 
@@ -167,6 +174,19 @@ def test_two_organizations(resourced_organization, organizations, user, get):
     assert counts[org_id_zero] == COUNTS_ZEROS
 
 
+@pytest.mark.django_db
+def test_hosts_counted(resourced_organization, user, get):
+    admin_user = user('admin', True)
+    assert Host.objects.org_active_count(resourced_organization.id) == 0
+    resourced_organization.inventories.first().hosts.create(name='Some Host')
+    assert Host.objects.org_active_count(resourced_organization.id) == 1
+    response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
+    assert response.status_code == 200
+
+    counts = response.data['summary_fields']['related_field_counts']
+    assert counts['hosts'] == Host.objects.org_active_count(resourced_organization.id) == 1
+
+
 @pytest.mark.django_db
 def test_scan_JT_counted(resourced_organization, user, get):
     admin_user = user('admin', True)
@@ -180,7 +200,10 @@ def test_scan_JT_counted(resourced_organization, user, get):
     # Test detail view
     detail_response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
     assert detail_response.status_code == 200
-    assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
+    counts = detail_response.data['summary_fields']['related_field_counts']
+    assert 'hosts' in counts
+    counts.pop('hosts')
+    assert counts == counts_dict
 
 
 @pytest.mark.django_db
@@ -205,4 +228,7 @@ def test_JT_not_double_counted(resourced_organization, user, get):
     # Test detail view
     detail_response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), admin_user)
     assert detail_response.status_code == 200
-    assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
+    counts = detail_response.data['summary_fields']['related_field_counts']
+    assert 'hosts' in counts
+    counts.pop('hosts')
+    assert counts == counts_dict
@@ -99,3 +99,12 @@ def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization,
         expect=200
     )
     assert Project.objects.get(pk=r1.data['id']).allow_override is False
+
+
+@pytest.mark.django_db
+def test_scm_project_local_path_invalid(get, patch, project, admin):
+    url = reverse('api:project_detail', kwargs={'pk': project.id})
+    resp = patch(url, {'local_path': '/foo/bar'}, user=admin, expect=400)
+    assert resp.data['local_path'] == [
+        'Cannot change local_path for git-based projects'
+    ]
@@ -282,10 +282,6 @@ def test_prefetch_ujt_project_capabilities(alice, project, job_template, mocker)
     list_serializer.child.to_representation(project)
     assert 'capability_map' not in list_serializer.child.context
 
-    # Models for which the prefetch is valid for do
-    list_serializer.child.to_representation(job_template)
-    assert set(list_serializer.child.context['capability_map'][job_template.id].keys()) == set(('copy', 'edit', 'start'))
-
 
 @pytest.mark.django_db
 def test_prefetch_group_capabilities(group, rando):
@@ -349,7 +349,7 @@ def test_months_with_31_days(post, admin_user):
     ('MINUTELY', 1, 60),
     ('MINUTELY', 15, 15 * 60),
     ('HOURLY', 1, 3600),
-    ('HOURLY', 4, 3600 * 4),
+    ('HOURLY', 2, 3600 * 2),
 ))
 def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds):
     url = reverse('api:schedule_rrule')
File diff suppressed because one or more lines are too long
@@ -89,7 +89,7 @@ class TestApprovalNodes():
         url = reverse('api:workflow_job_template_node_create_approval',
                       kwargs={'pk': approval_node.pk, 'version': 'v2'})
         post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0},
-             user=admin_user, expect=200)
+             user=admin_user, expect=201)
 
         approval_node = WorkflowJobTemplateNode.objects.get(pk=approval_node.pk)
         assert isinstance(approval_node.unified_job_template, WorkflowApprovalTemplate)
@@ -108,9 +108,9 @@ class TestApprovalNodes():
         assert {'name': ['This field may not be blank.']} == json.loads(r.content)
 
     @pytest.mark.parametrize("is_admin, is_org_admin, status", [
-        [True, False, 200],  # if they're a WFJT admin, they get a 200
+        [True, False, 201],  # if they're a WFJT admin, they get a 201
         [False, False, 403],  # if they're not a WFJT *nor* org admin, they get a 403
-        [False, True, 200],  # if they're an organization admin, they get a 200
+        [False, True, 201],  # if they're an organization admin, they get a 201
     ])
     def test_approval_node_creation_rbac(self, post, approval_node, alice, is_admin, is_org_admin, status):
         url = reverse('api:workflow_job_template_node_create_approval',
@@ -165,7 +165,7 @@ class TestApprovalNodes():
         url = reverse('api:workflow_job_template_node_create_approval',
                       kwargs={'pk': node.pk, 'version': 'v2'})
         post(url, {'name': 'Approve Test', 'description': '', 'timeout': 0},
-             user=admin_user, expect=200)
+             user=admin_user, expect=201)
         post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}),
              user=admin_user, expect=201)
         wf_job = WorkflowJob.objects.first()
@@ -195,7 +195,7 @@ class TestApprovalNodes():
         url = reverse('api:workflow_job_template_node_create_approval',
                       kwargs={'pk': node.pk, 'version': 'v2'})
         post(url, {'name': 'Deny Test', 'description': '', 'timeout': 0},
-             user=admin_user, expect=200)
+             user=admin_user, expect=201)
         post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}),
             user=admin_user, expect=201)
         wf_job = WorkflowJob.objects.first()
@@ -6,7 +6,7 @@ from collections import OrderedDict
 from django.db.models.deletion import Collector, SET_NULL, CASCADE
 from django.core.management import call_command
 
-from awx.main.management.commands.deletion import AWXCollector
+from awx.main.utils.deletion import AWXCollector
 from awx.main.models import (
     JobTemplate, User, Job, JobEvent, Notification,
     WorkflowJobNode, JobHostSummary
@@ -9,6 +9,9 @@ import os
 # Django
 from django.core.management.base import CommandError
 
+# for license errors
+from rest_framework.exceptions import PermissionDenied
+
 # AWX
 from awx.main.management.commands import inventory_import
 from awx.main.models import Inventory, Host, Group, InventorySource
@@ -83,7 +86,7 @@ class MockLoader:
         return self._data
 
 
-def mock_logging(self):
+def mock_logging(self, level):
     pass
 
 
@@ -322,6 +325,6 @@ def test_tower_version_compare():
             "version": "2.0.1-1068-g09684e2c41"
         }
     }
-    with pytest.raises(CommandError):
-        cmd.remote_tower_license_compare('very_supported')
+    with pytest.raises(PermissionDenied):
+        cmd.remote_tower_license_compare('open')
@@ -1,13 +0,0 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved.
-
-from awx.main.utils.common import StubLicense
-
-
-def test_stub_license():
-    license_actual = StubLicense().validate()
-    assert license_actual['license_key'] == 'OPEN'
-    assert license_actual['valid_key']
-    assert license_actual['compliant']
-    assert license_actual['license_type'] == 'open'
-
@@ -16,7 +16,7 @@ def test_awx_virtualenv_from_settings(inventory, project, machine_credential):
     )
     jt.credentials.add(machine_credential)
     job = jt.create_unified_job()
-    assert job.ansible_virtualenv_path == '/venv/ansible'
+    assert job.ansible_virtualenv_path == '/var/lib/awx/venv/ansible'
 
 
 @pytest.mark.django_db
@@ -43,28 +43,28 @@ def test_awx_custom_virtualenv(inventory, project, machine_credential, organizat
     jt.credentials.add(machine_credential)
     job = jt.create_unified_job()
 
-    job.organization.custom_virtualenv = '/venv/fancy-org'
+    job.organization.custom_virtualenv = '/var/lib/awx/venv/fancy-org'
     job.organization.save()
-    assert job.ansible_virtualenv_path == '/venv/fancy-org'
+    assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-org'
 
-    job.project.custom_virtualenv = '/venv/fancy-proj'
+    job.project.custom_virtualenv = '/var/lib/awx/venv/fancy-proj'
     job.project.save()
-    assert job.ansible_virtualenv_path == '/venv/fancy-proj'
+    assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-proj'
 
-    job.job_template.custom_virtualenv = '/venv/fancy-jt'
+    job.job_template.custom_virtualenv = '/var/lib/awx/venv/fancy-jt'
     job.job_template.save()
-    assert job.ansible_virtualenv_path == '/venv/fancy-jt'
+    assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-jt'
 
 
 @pytest.mark.django_db
 def test_awx_custom_virtualenv_without_jt(project):
-    project.custom_virtualenv = '/venv/fancy-proj'
+    project.custom_virtualenv = '/var/lib/awx/venv/fancy-proj'
     project.save()
     job = Job(project=project)
     job.save()
 
     job = Job.objects.get(pk=job.id)
-    assert job.ansible_virtualenv_path == '/venv/fancy-proj'
+    assert job.ansible_virtualenv_path == '/var/lib/awx/venv/fancy-proj'
 
 
 @pytest.mark.django_db
@@ -123,6 +123,15 @@ class TestJobNotificationMixin(object):
         context = job.context(job_serialization)
         check_structure(TestJobNotificationMixin.CONTEXT_STRUCTURE, context)
 
+    @pytest.mark.django_db
+    def test_context_job_metadata_with_unicode(self):
+        job = Job.objects.create(name='批量安装项目')
+        job_serialization = UnifiedJobSerializer(job).to_representation(job)
+        context = job.context(job_serialization)
+        assert '批量安装项目' in context['job_metadata']
+
     def test_context_stub(self):
         """The context stub is a fake context used to validate custom notification messages. Ensure that
         this also has the expected structure. Furthermore, ensure that the stub context contains
@@ -79,7 +79,6 @@ def test_default_cred_types():
         'aws',
         'azure_kv',
         'azure_rm',
-        'cloudforms',
         'conjur',
         'galaxy_api_token',
         'gce',
@@ -214,6 +214,9 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
         f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}"
 
+    env.pop('ANSIBLE_COLLECTIONS_PATHS', None)  # collection paths not relevant to this test
     env.pop('PYTHONPATH')
+    env.pop('VIRTUAL_ENV')
+    env.pop('PROOT_TMP_DIR')
     base_dir = os.path.join(DATA, 'plugins')
     if not os.path.exists(base_dir):
         os.mkdir(base_dir)
@@ -5,7 +5,7 @@ from awx.main.migrations import _inventory_source as invsrc
 
 from django.apps import apps
 
-from awx.main.models import InventorySource
+from awx.main.models import InventorySource, InventoryUpdate, ManagedCredentialType, CredentialType, Credential
 
 
 @pytest.mark.parametrize('vars,id_var,result', [
@@ -42,16 +42,40 @@ def test_apply_new_instance_id(inventory_source):
 
 
 @pytest.mark.django_db
-def test_replacement_scm_sources(inventory):
-    inv_source = InventorySource.objects.create(
-        name='test',
-        inventory=inventory,
-        organization=inventory.organization,
-        source='ec2'
+def test_cloudforms_inventory_removal(inventory):
+    ManagedCredentialType(
+        name='Red Hat CloudForms',
+        namespace='cloudforms',
+        kind='cloud',
+        managed_by_tower=True,
+        inputs={},
     )
-    invsrc.create_scm_script_substitute(apps, 'ec2')
-    inv_source.refresh_from_db()
-    assert inv_source.source == 'scm'
-    assert inv_source.source_project
-    project = inv_source.source_project
-    assert 'Replacement project for' in project.name
+    CredentialType.defaults['cloudforms']().save()
+    cloudforms = CredentialType.objects.get(namespace='cloudforms')
+    Credential.objects.create(
+        name='test',
+        credential_type=cloudforms,
+    )
+
+    for source in ('ec2', 'cloudforms'):
+        i = InventorySource.objects.create(
+            name='test',
+            inventory=inventory,
+            organization=inventory.organization,
+            source=source,
+        )
+        InventoryUpdate.objects.create(
+            name='test update',
+            inventory_source=i,
+            source=source,
+        )
+    assert Credential.objects.count() == 1
+    assert InventorySource.objects.count() == 2  # ec2 + cf
+    assert InventoryUpdate.objects.count() == 2  # ec2 + cf
+    invsrc.delete_cloudforms_inv_source(apps, None)
+    assert InventorySource.objects.count() == 1  # ec2
+    assert InventoryUpdate.objects.count() == 1  # ec2
+    assert InventorySource.objects.first().source == 'ec2'
+    assert InventoryUpdate.objects.first().source == 'ec2'
+    assert Credential.objects.count() == 0
+    assert CredentialType.objects.filter(namespace='cloudforms').exists() is False
@@ -1,6 +1,5 @@
 
-import glob
 import json
 import os
 
 from django.conf import settings
@@ -30,8 +29,7 @@ def test_python_and_js_licenses():
         # Check variations of '-' and '_' in filenames due to python
         for fname in [name, name.replace('-','_')]:
             if entry.startswith(fname) and entry.endswith('.tar.gz'):
-                entry = entry[:-7]
-                (n, v) = entry.rsplit('-',1)
+                v = entry.split(name + '-')[1].split('.tar.gz')[0]
                 return v
         return None
@@ -66,28 +64,6 @@ def test_python_and_js_licenses():
             ret[name] = { 'name': name, 'version': version}
         return ret
 
-
-    def read_ui_requirements(path):
-        def json_deps(jsondata):
-            ret = {}
-            deps = jsondata.get('dependencies',{})
-            for key in deps.keys():
-                key = key.lower()
-                devonly = deps[key].get('dev',False)
-                if not devonly:
-                    if key not in ret.keys():
-                        depname = key.replace('/','-')
-                        ret[depname] = {
-                            'name': depname,
-                            'version': deps[key]['version']
-                        }
-                        ret.update(json_deps(deps[key]))
-            return ret
-
-        with open('%s/package-lock.json' % path) as f:
-            jsondata = json.load(f)
-        return json_deps(jsondata)
-
     def remediate_licenses_and_requirements(licenses, requirements):
         errors = []
         items = list(licenses.keys())
@@ -114,12 +90,9 @@ def test_python_and_js_licenses():
 
     base_dir = settings.BASE_DIR
     api_licenses = index_licenses('%s/../docs/licenses' % base_dir)
-    ui_licenses = index_licenses('%s/../docs/licenses/ui' % base_dir)
     api_requirements = read_api_requirements('%s/../requirements' % base_dir)
-    ui_requirements = read_ui_requirements('%s/ui' % base_dir)
 
     errors = []
-    errors += remediate_licenses_and_requirements(ui_licenses, ui_requirements)
     errors += remediate_licenses_and_requirements(api_licenses, api_requirements)
     if errors:
         raise Exception('Included licenses not consistent with requirements:\n%s' %
@@ -4,7 +4,7 @@ from unittest import mock
 from django.test import TransactionTestCase
 
 from awx.main.access import UserAccess, RoleAccess, TeamAccess
-from awx.main.models import User, Organization, Inventory
+from awx.main.models import User, Organization, Inventory, Role
 
 
 class TestSysAuditorTransactional(TransactionTestCase):
@@ -170,4 +170,34 @@ def test_org_admin_cannot_delete_member_attached_to_other_group(org_admin, org_m
     access = UserAccess(org_admin)
     other_org.member_role.members.add(org_member)
     assert not access.can_delete(org_member)
+
+
+@pytest.mark.parametrize('reverse', (True, False))
+@pytest.mark.django_db
+def test_consistency_of_is_superuser_flag(reverse):
+    users = [User.objects.create(username='rando_{}'.format(i)) for i in range(2)]
+    for u in users:
+        assert u.is_superuser is False
+
+    system_admin = Role.singleton('system_administrator')
+    if reverse:
+        for u in users:
+            u.roles.add(system_admin)
+    else:
+        system_admin.members.add(*[u.id for u in users])  # like .add(42, 54)
+
+    for u in users:
+        u.refresh_from_db()
+        assert u.is_superuser is True
+
+    users[0].roles.clear()
+    for u in users:
+        u.refresh_from_db()
+    assert users[0].is_superuser is False
+    assert users[1].is_superuser is True
+
+    system_admin.members.clear()
+
+    for u in users:
+        u.refresh_from_db()
+        assert u.is_superuser is False
@@ -33,32 +33,6 @@ class TestInvalidOptions:
         assert 'inventory-id' in str(err.value)
         assert 'exclusive' in str(err.value)
 
-    def test_invalid_options_id_and_keep_vars(self):
-        # You can't overwrite and keep_vars at the same time, that wouldn't make sense
-        cmd = Command()
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-
-    def test_invalid_options_id_but_no_source(self):
-        # Need a source to import
-        cmd = Command()
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite_vars=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-
     def test_invalid_options_missing_source(self):
         cmd = Command()
         with pytest.raises(CommandError) as err:
@@ -45,19 +45,14 @@ class TestInstanceGroup(object):
         (T(100), Is([50, 0, 20, 99, 11, 1, 5, 99]), None, "The task don't a fit, you must a quit!"),
     ])
     def test_fit_task_to_most_remaining_capacity_instance(self, task, instances, instance_fit_index, reason):
-        with mock.patch.object(InstanceGroup,
-                               'instances',
-                               Mock(spec_set=['filter'],
-                                    filter=lambda *args, **kargs: Mock(spec_set=['order_by'],
-                                                                       order_by=lambda x: instances))):
-            ig = InstanceGroup(id=10)
+        ig = InstanceGroup(id=10)
 
-            if instance_fit_index is None:
-                assert ig.fit_task_to_most_remaining_capacity_instance(task) is None, reason
-            else:
-                assert ig.fit_task_to_most_remaining_capacity_instance(task) == \
-                    instances[instance_fit_index], reason
+        instance_picked = ig.fit_task_to_most_remaining_capacity_instance(task, instances)
+
+        if instance_fit_index is None:
+            assert instance_picked is None, reason
+        else:
+            assert instance_picked == instances[instance_fit_index], reason
 
     @pytest.mark.parametrize('instances,instance_fit_index,reason', [
         (Is([(0, 100)]), 0, "One idle instance, pick it"),
@@ -70,16 +65,12 @@ class TestInstanceGroup(object):
         def filter_offline_instances(*args):
             return filter(lambda i: i.capacity > 0, instances)
 
-        with mock.patch.object(InstanceGroup,
-                               'instances',
-                               Mock(spec_set=['filter'],
-                                    filter=lambda *args, **kargs: Mock(spec_set=['order_by'],
-                                                                       order_by=filter_offline_instances))):
-            ig = InstanceGroup(id=10)
+        ig = InstanceGroup(id=10)
+        instances_online_only = filter_offline_instances(instances)
 
-            if instance_fit_index is None:
-                assert ig.find_largest_idle_instance() is None, reason
-            else:
-                assert ig.find_largest_idle_instance() == \
-                    instances[instance_fit_index], reason
+        if instance_fit_index is None:
+            assert ig.find_largest_idle_instance(instances_online_only) is None, reason
+        else:
+            assert ig.find_largest_idle_instance(instances_online_only) == \
+                instances[instance_fit_index], reason
@@ -39,6 +39,8 @@ from awx.main import tasks
 from awx.main.utils import encrypt_field, encrypt_value
 from awx.main.utils.safe_yaml import SafeLoader
 
+from awx.main.utils.licensing import Licenser
+
 
 class TestJobExecution(object):
     EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-----'
@@ -178,7 +180,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da
         'source_vars_dict': {},
         'get_cloud_credential': mocker.Mock(return_value=credential),
         'get_extra_credentials': lambda x: [],
-        'ansible_virtualenv_path': '/venv/foo'
+        'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
     })
     cloud_config = update.build_private_data(inventory_update, private_data_dir)
     cloud_credential = yaml.safe_load(
@@ -222,6 +224,52 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
         'source_vars_dict': {},
         'get_cloud_credential': mocker.Mock(return_value=credential),
         'get_extra_credentials': lambda x: [],
         'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
     })
     cloud_config = update.build_private_data(inventory_update, private_data_dir)
     cloud_credential = yaml.safe_load(
         cloud_config.get('credentials')[credential]
     )
     assert cloud_credential['clouds'] == {
         'devstack': {
             'auth': {
                 'auth_url': 'https://keystone.openstack.example.org',
                 'password': 'secrete',
                 'project_name': 'demo-project',
                 'username': 'demo',
                 'domain_name': 'my-demo-domain',
                 'project_domain_name': 'project-domain',
             },
             'verify': expected,
             'private': True,
         }
     }
+
+
+@pytest.mark.parametrize("source,expected", [
+    (None, True), (False, False), (True, True)
+])
+def test_openstack_client_config_generation_with_project_region_name(mocker, source, expected, private_data_dir):
+    update = tasks.RunInventoryUpdate()
+    credential_type = CredentialType.defaults['openstack']()
+    inputs = {
+        'host': 'https://keystone.openstack.example.org',
+        'username': 'demo',
+        'password': 'secrete',
+        'project': 'demo-project',
+        'domain': 'my-demo-domain',
+        'project_domain_name': 'project-domain',
+        'project_region_name': 'region-name',
+    }
+    if source is not None:
+        inputs['verify_ssl'] = source
+    credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
+
+    inventory_update = mocker.Mock(**{
+        'source': 'openstack',
+        'source_vars_dict': {},
+        'get_cloud_credential': mocker.Mock(return_value=credential),
+        'get_extra_credentials': lambda x: [],
+        'ansible_virtualenv_path': '/venv/foo'
+    })
+    cloud_config = update.build_private_data(inventory_update, private_data_dir)
@@ -240,6 +288,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
         },
         'verify': expected,
         'private': True,
+        'region_name': 'region-name',
     }
 }
@@ -265,7 +314,7 @@ def test_openstack_client_config_generation_with_private_source_vars(mocker, sou
         'source_vars_dict': {'private': source},
         'get_cloud_credential': mocker.Mock(return_value=credential),
         'get_extra_credentials': lambda x: [],
-        'ansible_virtualenv_path': '/venv/foo'
+        'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
     })
     cloud_config = update.build_private_data(inventory_update, private_data_dir)
     cloud_credential = yaml.load(
@@ -623,13 +672,13 @@ class TestGenericRun():
 
     def test_invalid_custom_virtualenv(self, patch_Job, private_data_dir):
         job = Job(project=Project(), inventory=Inventory())
-        job.project.custom_virtualenv = '/venv/missing'
+        job.project.custom_virtualenv = '/var/lib/awx/venv/missing'
         task = tasks.RunJob()
 
         with pytest.raises(tasks.InvalidVirtualenvError) as e:
             task.build_env(job, private_data_dir)
 
-        assert 'Invalid virtual environment selected: /venv/missing' == str(e.value)
+        assert 'Invalid virtual environment selected: /var/lib/awx/venv/missing' == str(e.value)
 
 
 class TestAdhocRun(TestJobExecution):
@@ -1830,7 +1879,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
 
         task = RunProjectUpdate()
         env = task.build_env(project_update, private_data_dir)
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)
+
         assert task.__vars__['roles_enabled'] is False
         assert task.__vars__['collections_enabled'] is False
         for k in env:
@@ -1850,7 +1902,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
         project_update.project.organization.galaxy_credentials.add(public_galaxy)
         task = RunProjectUpdate()
         env = task.build_env(project_update, private_data_dir)
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)
+
         assert task.__vars__['roles_enabled'] is True
         assert task.__vars__['collections_enabled'] is True
         assert sorted([
@@ -1901,19 +1956,16 @@ class TestProjectUpdateCredentials(TestJobExecution):
     parametrize = {
         'test_username_and_password_auth': [
            dict(scm_type='git'),
-           dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ],
        'test_ssh_key_auth': [
            dict(scm_type='git'),
-           dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ],
        'test_awx_task_env': [
            dict(scm_type='git'),
-           dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ]
@@ -1935,7 +1987,9 @@ class TestProjectUpdateCredentials(TestJobExecution):
         assert settings.PROJECTS_ROOT in process_isolation['process_isolation_show_paths']
 
         task._write_extra_vars_file = mock.Mock()
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)
 
         call_args, _ = task._write_extra_vars_file.call_args_list[0]
         _, extra_vars = call_args
@@ -2051,8 +2105,8 @@ class TestInventoryUpdateCredentials(TestJobExecution):
             credential, env, {}, [], private_data_dir
         )
 
-        assert '--custom' in ' '.join(args)
-        script = args[args.index('--source') + 1]
+        assert '-i' in ' '.join(args)
+        script = args[args.index('-i') + 1]
         with open(script, 'r') as f:
             assert f.read() == inventory_update.source_script.script
         assert env['FOO'] == 'BAR'
@@ -2140,10 +2194,6 @@ class TestInventoryUpdateCredentials(TestJobExecution):
             return cred
         inventory_update.get_cloud_credential = get_cred
         inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
-        inventory_update.source_vars = {
-            'include_powerstate': 'yes',
-            'group_by_resource_group': 'no'
-        }
 
         private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
         env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
@@ -2177,11 +2227,6 @@ class TestInventoryUpdateCredentials(TestJobExecution):
             return cred
         inventory_update.get_cloud_credential = get_cred
         inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
-        inventory_update.source_vars = {
-            'include_powerstate': 'yes',
-            'group_by_resource_group': 'no',
-            'group_by_security_group': 'no'
-        }
 
         private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
         env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
@@ -2296,21 +2341,14 @@ class TestInventoryUpdateCredentials(TestJobExecution):
         inventory_update.get_cloud_credential = get_cred
         inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
 
-        inventory_update.source_vars = {
-            'satellite6_group_patterns': '[a,b,c]',
-            'satellite6_group_prefix': 'hey_',
-            'satellite6_want_hostcollections': True,
-            'satellite6_want_ansible_ssh_host': True,
-            'satellite6_rich_params': True,
-            'satellite6_want_facts': False
-        }
-
         private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
         env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
         safe_env = build_safe_env(env)
 
-        env["FOREMAN_SERVER"] == "https://example.org",
-        env["FOREMAN_USER"] == "bob",
-        env["FOREMAN_PASSWORD"] == "secret",
+        assert env["FOREMAN_SERVER"] == "https://example.org"
+        assert env["FOREMAN_USER"] == "bob"
+        assert env["FOREMAN_PASSWORD"] == "secret"
         assert safe_env["FOREMAN_PASSWORD"] == tasks.HIDDEN_PASSWORD
 
     @pytest.mark.parametrize('verify', [True, False])
     def test_tower_source(self, verify, inventory_update, private_data_dir, mocker):
@@ -55,8 +55,7 @@ __all__ = [
     'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
     'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
     'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
-    'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
-    'StubLicense'
+    'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout'
 ]
 
 
@@ -190,7 +189,7 @@ def get_awx_version():
 
 
 def get_awx_http_client_headers():
-    license = get_license(show_key=False).get('license_type', 'UNLICENSED')
+    license = get_license().get('license_type', 'UNLICENSED')
     headers = {
         'Content-Type': 'application/json',
         'User-Agent': '{} {} ({})'.format(
@@ -202,34 +201,15 @@ def get_awx_http_client_headers():
     return headers
 
 
-class StubLicense(object):
-
-    features = {
-        'activity_streams': True,
-        'ha': True,
-        'ldap': True,
-        'multiple_organizations': True,
-        'surveys': True,
-        'system_tracking': True,
-        'rebranding': True,
-        'enterprise_auth': True,
-        'workflows': True,
-    }
-
-    def validate(self):
-        return dict(license_key='OPEN',
-                    valid_key=True,
-                    compliant=True,
-                    features=self.features,
-                    license_type='open')
-
-
 def get_licenser(*args, **kwargs):
+    from awx.main.utils.licensing import Licenser, OpenLicense
     try:
-        from tower_license import TowerLicense
-        return TowerLicense(*args, **kwargs)
-    except ImportError:
-        return StubLicense(*args, **kwargs)
+        if os.path.exists('/var/lib/awx/.tower_version'):
+            return Licenser(*args, **kwargs)
+        else:
+            return OpenLicense()
+    except Exception as e:
+        raise ValueError(_('Error importing Tower License: %s') % e)
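In practice the new get_licenser() selection means: on a Tower install (where the marker file exists) the full Licenser is used, and on AWX the OpenLicense stub answers. A minimal sketch, assuming nothing beyond the code above:

    license_info = get_licenser().validate()
    print(license_info.get('license_type'))  # 'open' on AWX, e.g. 'enterprise' on Tower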
def update_scm_url(scm_type, url, username=True, password=True,
@@ -242,9 +222,8 @@ def update_scm_url(scm_type, url, username=True, password=True,
     '''
     # Handle all of the URL formats supported by the SCM systems:
     # git: https://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
-    # hg: http://www.selenic.com/mercurial/hg.1.html#url-paths
     # svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
-    if scm_type not in ('git', 'hg', 'svn', 'insights', 'archive'):
+    if scm_type not in ('git', 'svn', 'insights', 'archive'):
         raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type))
     if not url.strip():
         return ''
@@ -276,8 +255,8 @@ def update_scm_url(scm_type, url, username=True, password=True,
         # SCP style before passed to git module.
         parts = urllib.parse.urlsplit('git+ssh://%s' % modified_url)
     # Handle local paths specified without file scheme (e.g. /path/to/foo).
-    # Only supported by git and hg.
-    elif scm_type in ('git', 'hg'):
+    # Only supported by git.
+    elif scm_type == 'git':
         if not url.startswith('/'):
             parts = urllib.parse.urlsplit('file:///%s' % url)
         else:
@@ -288,7 +267,6 @@ def update_scm_url(scm_type, url, username=True, password=True,
     # Validate that scheme is valid for given scm_type.
     scm_type_schemes = {
         'git': ('ssh', 'git', 'git+ssh', 'http', 'https', 'ftp', 'ftps', 'file'),
-        'hg': ('http', 'https', 'ssh', 'file'),
         'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'),
         'insights': ('http', 'https'),
         'archive': ('http', 'https'),
@@ -320,12 +298,6 @@ def update_scm_url(scm_type, url, username=True, password=True,
     if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
         #raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
         netloc_password = ''
-    special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org')
-    if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg':
-        raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname)
-    if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password:
-        #raise ValueError('Password not supported for SSH with Mercurial.')
-        netloc_password = ''
 
     if netloc_username and parts.scheme != 'file' and scm_type not in ("insights", "archive"):
         netloc = u':'.join([urllib.parse.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
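For illustration, roughly how update_scm_url normalizes two common inputs now that the Mercurial paths are gone; the outputs are approximate and use the default keyword arguments shown in the signature above:

    update_scm_url('git', 'git@github.com:ansible/awx.git')
    # SCP-style input is rewritten onto a scheme, roughly 'git+ssh://git@github.com/ansible/awx.git'
    update_scm_url('git', '/srv/projects/local_repo')
    # a bare local path gains a file scheme: 'file:///srv/projects/local_repo'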
@@ -9,6 +9,7 @@ import socket
 from datetime import datetime
 
 from dateutil.tz import tzutc
+from django.utils.timezone import now
 from django.core.serializers.json import DjangoJSONEncoder
 from django.conf import settings
 
@@ -17,8 +18,15 @@ class TimeFormatter(logging.Formatter):
     '''
     Custom log formatter used for inventory imports
     '''
+    def __init__(self, start_time=None, **kwargs):
+        if start_time is None:
+            self.job_start = now()
+        else:
+            self.job_start = start_time
+        super(TimeFormatter, self).__init__(**kwargs)
+
     def format(self, record):
-        record.relativeSeconds = record.relativeCreated / 1000.0
+        record.relativeSeconds = (now() - self.job_start).total_seconds()
         return logging.Formatter.format(self, record)
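A minimal sketch of using the reworked formatter; relativeSeconds now counts from the supplied job start rather than from interpreter start:

    import logging
    from django.utils.timezone import now

    handler = logging.StreamHandler()
    handler.setFormatter(TimeFormatter(start_time=now(), fmt='%(relativeSeconds)9.3f %(message)s'))
    log = logging.getLogger('demo')
    log.addHandler(handler)
    log.error('loaded 120 hosts')   # prints something like '    0.004 loaded 120 hosts'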
@@ -7,6 +7,10 @@ import os.path
 
 # Django
 from django.conf import settings
+from django.utils.timezone import now
+
+# AWX
+from awx.main.exceptions import PostRunError
 
 
 class RSysLogHandler(logging.handlers.SysLogHandler):
@@ -40,6 +44,58 @@ class RSysLogHandler(logging.handlers.SysLogHandler):
         pass
 
 
+class SpecialInventoryHandler(logging.Handler):
+    """Logging handler used for the saving-to-database part of inventory updates
+    run by the task system
+    this dispatches events directly to be processed by the callback receiver,
+    as opposed to ansible-runner
+    """
+
+    def __init__(self, event_handler, cancel_callback, job_timeout, verbosity,
+                 start_time=None, counter=0, initial_line=0, **kwargs):
+        self.event_handler = event_handler
+        self.cancel_callback = cancel_callback
+        self.job_timeout = job_timeout
+        if start_time is None:
+            self.job_start = now()
+        else:
+            self.job_start = start_time
+        self.last_check = self.job_start
+        self.counter = counter
+        self.skip_level = [logging.WARNING, logging.INFO, logging.DEBUG, 0][verbosity]
+        self._current_line = initial_line
+        super(SpecialInventoryHandler, self).__init__(**kwargs)
+
+    def emit(self, record):
+        # check cancel and timeout status regardless of log level
+        this_time = now()
+        if (this_time - self.last_check).total_seconds() > 0.5:  # cancel callback is expensive
+            self.last_check = this_time
+            if self.cancel_callback():
+                raise PostRunError('Inventory update has been canceled', status='canceled')
+        if self.job_timeout and ((this_time - self.job_start).total_seconds() > self.job_timeout):
+            raise PostRunError('Inventory update has timed out', status='canceled')
+
+        # skip logging for low severity logs
+        if record.levelno < self.skip_level:
+            return
+
+        self.counter += 1
+        msg = self.format(record)
+        n_lines = len(msg.strip().split('\n'))  # don't count line breaks at boundary of text
+        dispatch_data = dict(
+            created=now().isoformat(),
+            event='verbose',
+            counter=self.counter,
+            stdout=msg,
+            start_line=self._current_line,
+            end_line=self._current_line + n_lines
+        )
+        self._current_line += n_lines
+
+        self.event_handler(dispatch_data)
+
+
 ColorHandler = logging.StreamHandler
 
 if settings.COLOR_LOGS is True:
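A minimal stand-alone sketch of SpecialInventoryHandler, collecting the dispatched event dicts in a list instead of the callback receiver:

    import logging

    events = []
    handler = SpecialInventoryHandler(
        event_handler=events.append,
        cancel_callback=lambda: False,   # never cancel in this sketch
        job_timeout=0,                   # falsy, so the timeout check is skipped
        verbosity=1,                     # skip_level becomes logging.INFO
    )
    log = logging.getLogger('awx.main.commands.inventory_import')
    log.addHandler(handler)
    log.warning('loaded 10 hosts')
    # events[0] now looks like {'event': 'verbose', 'counter': 1, 'stdout': 'loaded 10 hosts',
    #                           'start_line': 0, 'end_line': 1, 'created': '...'}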
awx/main/utils/licensing.py (new file, 439 lines)
@@ -0,0 +1,439 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

'''
This is intended to be a lightweight license class for verifying subscriptions, and parsing subscription data
from entitlement certificates.

The Licenser class can do the following:
 - Parse an Entitlement cert to generate license
'''

import base64
import configparser
from datetime import datetime, timezone
import collections
import copy
import io
import json
import logging
import re
import requests
import time
import zipfile

from dateutil.parser import parse as parse_date

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography import x509

# Django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.main.models import Host

MAX_INSTANCES = 9999999

logger = logging.getLogger(__name__)


def rhsm_config():
    path = '/etc/rhsm/rhsm.conf'
    config = configparser.ConfigParser()
    config.read(path)
    return config


def validate_entitlement_manifest(data):
    buff = io.BytesIO()
    buff.write(base64.b64decode(data))
    try:
        z = zipfile.ZipFile(buff)
    except zipfile.BadZipFile as e:
        raise ValueError(_("Invalid manifest: a subscription manifest zip file is required.")) from e
    buff = io.BytesIO()

    files = z.namelist()
    if 'consumer_export.zip' not in files or 'signature' not in files:
        raise ValueError(_("Invalid manifest: missing required files."))
    export = z.open('consumer_export.zip').read()
    sig = z.open('signature').read()
    with open('/etc/tower/candlepin-redhat-ca.crt', 'rb') as f:
        cert = x509.load_pem_x509_certificate(f.read(), backend=default_backend())
    key = cert.public_key()
    try:
        key.verify(sig, export, padding=padding.PKCS1v15(), algorithm=hashes.SHA256())
    except InvalidSignature as e:
        raise ValueError(_("Invalid manifest: signature verification failed.")) from e

    buff.write(export)
    z = zipfile.ZipFile(buff)
    subs = []
    for f in z.filelist:
        if f.filename.startswith('export/entitlements') and f.filename.endswith('.json'):
            subs.append(json.loads(z.open(f).read()))
    if subs:
        return subs
    raise ValueError(_("Invalid manifest: manifest contains no subscriptions."))

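For illustration, feeding a downloaded subscription manifest to validate_entitlement_manifest; the path is hypothetical, and the function expects the base64-encoded bytes of the manifest zip, returning a list of subscription dicts or raising ValueError:

    import base64

    # requires /etc/tower/candlepin-redhat-ca.crt to be present, per the code above
    with open('/tmp/manifest.zip', 'rb') as f:
        data = base64.b64encode(f.read())
    subs = validate_entitlement_manifest(data)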
class OpenLicense(object):
    def validate(self):
        return dict(
            license_type='open',
            valid_key=True,
            subscription_name='OPEN',
            product_name="AWX",
        )


class Licenser(object):
    # warn when there is a month (30 days) left on the subscription
    SUBSCRIPTION_TIMEOUT = 60 * 60 * 24 * 30

    UNLICENSED_DATA = dict(
        subscription_name=None,
        sku=None,
        support_level=None,
        instance_count=0,
        license_date=0,
        license_type="UNLICENSED",
        product_name="Red Hat Ansible Automation Platform",
        valid_key=False
    )

    def __init__(self, **kwargs):
        self._attrs = dict(
            instance_count=0,
            license_date=0,
            license_type='UNLICENSED',
        )
        self.config = rhsm_config()
        if not kwargs:
            license_setting = getattr(settings, 'LICENSE', None)
            if license_setting is not None:
                kwargs = license_setting

        if 'company_name' in kwargs:
            kwargs.pop('company_name')
        self._attrs.update(kwargs)
        if 'valid_key' in self._attrs:
            if not self._attrs['valid_key']:
                self._unset_attrs()
        else:
            self._unset_attrs()

    def _unset_attrs(self):
        self._attrs = self.UNLICENSED_DATA.copy()

    def license_from_manifest(self, manifest):
        def is_appropriate_manifest_sub(sub):
            if sub['pool']['activeSubscription'] is False:
                return False
            now = datetime.now(timezone.utc)
            if parse_date(sub['startDate']) > now:
                return False
            if parse_date(sub['endDate']) < now:
                return False
            products = sub['pool']['providedProducts']
            if any(product.get('productId') == '480' for product in products):
                return True
            return False

        def _can_aggregate(sub, license):
            # We aggregate multiple subs into a larger meta-sub, if they match
            #
            # No current sub in aggregate
            if not license:
                return True
            # Same SKU type (SER vs MCT vs others)?
            if license['sku'][0:3] != sub['pool']['productId'][0:3]:
                return False
            return True

        # Parse output for subscription metadata to build config
        license = dict()
        for sub in manifest:
            if not is_appropriate_manifest_sub(sub):
                logger.warning("Subscription %s (%s) in manifest is not active or for another product" %
                               (sub['pool']['productName'], sub['pool']['productId']))
                continue
            if not _can_aggregate(sub, license):
                logger.warning("Subscription %s (%s) in manifest does not match other manifest subscriptions" %
                               (sub['pool']['productName'], sub['pool']['productId']))
                continue

            license.setdefault('sku', sub['pool']['productId'])
            license.setdefault('subscription_name', sub['pool']['productName'])
            license.setdefault('pool_id', sub['pool']['id'])
            license.setdefault('product_name', sub['pool']['productName'])
            license.setdefault('valid_key', True)
            license.setdefault('license_type', 'enterprise')
            license.setdefault('satellite', False)
            # Use the nearest end date
            endDate = parse_date(sub['endDate'])
            currentEndDateStr = license.get('license_date', '4102462800')  # 2100-01-01
            currentEndDate = datetime.fromtimestamp(int(currentEndDateStr), timezone.utc)
            if endDate < currentEndDate:
                license['license_date'] = endDate.strftime('%s')
            instances = sub['quantity']
            license['instance_count'] = license.get('instance_count', 0) + instances
            license['subscription_name'] = re.sub(r'[\d]* Managed Nodes', '%d Managed Nodes' % license['instance_count'], license['subscription_name'])

        if not license:
            logger.error("No valid subscriptions found in manifest")
        self._attrs.update(license)
        settings.LICENSE = self._attrs
        return self._attrs

    def update(self, **kwargs):
        # Update attributes of the current license.
        if 'instance_count' in kwargs:
            kwargs['instance_count'] = int(kwargs['instance_count'])
        if 'license_date' in kwargs:
            kwargs['license_date'] = int(kwargs['license_date'])
        self._attrs.update(kwargs)

    def validate_rh(self, user, pw):
        try:
            host = 'https://' + str(self.config.get("server", "hostname"))
        except Exception:
            logger.exception('Cannot access rhsm.conf, make sure subscription manager is installed and configured.')
            host = None
        if not host:
            host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None)

        if not user:
            raise ValueError('subscriptions_username is required')

        if not pw:
            raise ValueError('subscriptions_password is required')

        if host and user and pw:
            if 'subscription.rhsm.redhat.com' in host:
                json = self.get_rhsm_subs(host, user, pw)
            else:
                json = self.get_satellite_subs(host, user, pw)
            return self.generate_license_options_from_entitlements(json)
        return []

    def get_rhsm_subs(self, host, user, pw):
        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
        json = []
        try:
            subs = requests.get(
                '/'.join([host, 'subscription/users/{}/owners'.format(user)]),
                verify=verify,
                auth=(user, pw)
            )
        except requests.exceptions.ConnectionError as error:
            raise error
        except OSError as error:
            raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error  # noqa
        subs.raise_for_status()

        for sub in subs.json():
            resp = requests.get(
                '/'.join([
                    host,
                    'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])
                ]),
                verify=verify,
                auth=(user, pw)
            )
            resp.raise_for_status()
            json.extend(resp.json())
        return json
    def get_satellite_subs(self, host, user, pw):
        port = None
        try:
            verify = str(self.config.get("rhsm", "repo_ca_cert"))
            port = str(self.config.get("server", "port"))
        except Exception as e:
            logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e)))
            verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
        if port:
            host = ':'.join([host, port])
        json = []
        try:
            orgs = requests.get(
                '/'.join([host, 'katello/api/organizations']),
                verify=verify,
                auth=(user, pw)
            )
        except requests.exceptions.ConnectionError as error:
            raise error
        except OSError as error:
            raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error  # noqa
        orgs.raise_for_status()

        for org in orgs.json()['results']:
            resp = requests.get(
                '/'.join([
                    host,
                    '/katello/api/organizations/{}/subscriptions/?search=Red Hat Ansible Automation'.format(org['id'])
                ]),
                verify=verify,
                auth=(user, pw)
            )
            resp.raise_for_status()
            results = resp.json()['results']
            if results != []:
                for sub in results:
                    # Parse output for subscription metadata to build config
                    license = dict()
                    license['productId'] = sub['product_id']
                    license['quantity'] = int(sub['quantity'])
                    license['support_level'] = sub['support_level']
                    license['subscription_name'] = sub['name']
                    license['id'] = sub['upstream_pool_id']
                    license['endDate'] = sub['end_date']
                    license['productName'] = "Red Hat Ansible Automation"
                    license['valid_key'] = True
                    license['license_type'] = 'enterprise'
                    license['satellite'] = True
                    json.append(license)
        return json
    def is_appropriate_sat_sub(self, sub):
        if 'Red Hat Ansible Automation' not in sub['subscription_name']:
            return False
        return True

    def is_appropriate_sub(self, sub):
        if sub['activeSubscription'] is False:
            return False
        # Products that contain Ansible Tower
        products = sub.get('providedProducts', [])
        if any(product.get('productId') == '480' for product in products):
            return True
        return False
    def generate_license_options_from_entitlements(self, json):
        from dateutil.parser import parse
        ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite')
        valid_subs = []
        for sub in json:
            satellite = sub.get('satellite')
            if satellite:
                is_valid = self.is_appropriate_sat_sub(sub)
            else:
                is_valid = self.is_appropriate_sub(sub)
            if is_valid:
                try:
                    end_date = parse(sub.get('endDate'))
                except Exception:
                    continue
                now = datetime.utcnow()
                now = now.replace(tzinfo=end_date.tzinfo)
                if end_date < now:
                    # If the sub has a past end date, skip it
                    continue
                try:
                    quantity = int(sub['quantity'])
                    if quantity == -1:
                        # effectively, unlimited
                        quantity = MAX_INSTANCES
                except Exception:
                    continue

                sku = sub['productId']
                trial = sku.startswith('S')  # i.e., SER/SVC
                support_level = ''
                pool_id = sub['id']
                if satellite:
                    support_level = sub['support_level']
                else:
                    for attr in sub.get('productAttributes', []):
                        if attr.get('name') == 'support_level':
                            support_level = attr.get('value')

                valid_subs.append(ValidSub(
                    sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite
                ))

        if valid_subs:
            licenses = []
            for sub in valid_subs:
                license = self.__class__(subscription_name='Red Hat Ansible Automation Platform')
                license._attrs['instance_count'] = int(sub.quantity)
                license._attrs['sku'] = sub.sku
                license._attrs['support_level'] = sub.support_level
                license._attrs['license_type'] = 'enterprise'
                if sub.trial:
                    license._attrs['trial'] = True
                    license._attrs['license_type'] = 'trial'
                license._attrs['instance_count'] = min(
                    MAX_INSTANCES, license._attrs['instance_count']
                )
                human_instances = license._attrs['instance_count']
                if human_instances == MAX_INSTANCES:
                    human_instances = 'Unlimited'
                subscription_name = re.sub(
                    r' \([\d]+ Managed Nodes',
                    ' ({} Managed Nodes'.format(human_instances),
                    sub.name
                )
                license._attrs['subscription_name'] = subscription_name
                # Use this sub's own satellite flag, not the loop variable
                # leaked from the earlier loop (which holds the last value seen).
                license._attrs['satellite'] = sub.satellite
                license._attrs['valid_key'] = True
                license.update(
                    license_date=int(sub.end_date.strftime('%s'))
                )
                license.update(
                    pool_id=sub.pool_id
                )
                licenses.append(license._attrs.copy())
            return licenses

        raise ValueError(
            'No valid Red Hat Ansible Automation subscription could be found for this account.'  # noqa
        )
    def validate(self):
        # Return license attributes with additional validation info.
        attrs = copy.deepcopy(self._attrs)
        type = attrs.get('license_type', 'none')

        if type == 'UNLICENSED':
            attrs.update(dict(valid_key=False, compliant=False))
            return attrs
        attrs['valid_key'] = True

        if Host:
            current_instances = Host.objects.active_count()
        else:
            current_instances = 0
        instance_count = int(attrs.get('instance_count', 0))
        attrs['current_instances'] = current_instances
        free_instances = (instance_count - current_instances)
        attrs['free_instances'] = max(0, free_instances)

        license_date = int(attrs.get('license_date', 0) or 0)
        current_date = int(time.time())
        time_remaining = license_date - current_date
        attrs['time_remaining'] = time_remaining
        if attrs.setdefault('trial', False):
            attrs['grace_period_remaining'] = time_remaining
        else:
            # 30-day grace period (2592000 seconds) past the license end date
            attrs['grace_period_remaining'] = (license_date + 2592000) - current_date
        attrs['compliant'] = bool(time_remaining > 0 and free_instances >= 0)
        attrs['date_warning'] = bool(time_remaining < self.SUBSCRIPTION_TIMEOUT)
        attrs['date_expired'] = bool(time_remaining <= 0)
        return attrs
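For orientation, here is a hedged usage sketch of the subscription helpers above. The enclosing class name (Licenser) and every manifest value are illustrative assumptions; only the key names and the '480' product id are taken from the code itself.

# Hedged sketch: 'Licenser' is an assumed name for the enclosing class,
# and all manifest values below are made up for illustration.
manifest = [{
    'startDate': '2021-01-01T00:00:00Z',
    'endDate': '2030-01-01T00:00:00Z',
    'quantity': 100,
    'pool': {
        'id': 'pool-1234',                           # illustrative pool id
        'activeSubscription': True,
        'productId': 'MCT3691',                      # illustrative SKU; the first 3 chars drive aggregation
        'productName': 'Red Hat Ansible Automation Platform, 100 Managed Nodes',
        'providedProducts': [{'productId': '480'}],  # '480' marks an Ansible product
    },
}]

licenser = Licenser()
attrs = licenser.license_from_manifest(manifest)
print(attrs['instance_count'], licenser.validate()['compliant'])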
awx/main/utils/profiling.py (new file, 151 lines)
@@ -0,0 +1,151 @@
import cProfile
import functools
import pstats
import os
import uuid
import datetime
import json
import sys


class AWXProfileBase:
    def __init__(self, name, dest):
        self.name = name
        self.dest = dest
        self.results = {}

    def generate_results(self):
        raise RuntimeError("define me")

    def output_results(self, fname=None):
        if not os.path.isdir(self.dest):
            os.makedirs(self.dest)

        if fname:
            fpath = os.path.join(self.dest, fname)
            with open(fpath, 'w') as f:
                f.write(json.dumps(self.results, indent=2))


class AWXTiming(AWXProfileBase):
    def __init__(self, name, dest='/var/log/tower/timing'):
        super().__init__(name, dest)

        self.time_start = None
        self.time_end = None

    def start(self):
        self.time_start = datetime.datetime.now()

    def stop(self):
        self.time_end = datetime.datetime.now()

        self.generate_results()
        self.output_results()

    def generate_results(self):
        diff = (self.time_end - self.time_start).total_seconds()
        self.results = {
            'name': self.name,
            'diff': f'{diff}-seconds',
        }

    def output_results(self):
        fname = f"{self.results['diff']}-{self.name}-{uuid.uuid4()}.time"
        super().output_results(fname)


def timing(name, *init_args, **init_kwargs):
    def decorator_profile(func):
        @functools.wraps(func)
        def wrapper_profile(*args, **kwargs):
            timing = AWXTiming(name, *init_args, **init_kwargs)
            timing.start()
            res = func(*args, **kwargs)
            timing.stop()
            return res
        return wrapper_profile
    return decorator_profile


class AWXProfiler(AWXProfileBase):
    def __init__(self, name, dest='/var/log/tower/profile', dot_enabled=True):
        '''
        Try to do as little as possible in init. Instead, do the init
        only when the profiling is started.
        '''
        super().__init__(name, dest)
        self.started = False
        self.dot_enabled = dot_enabled
        self.results = {
            'total_time_seconds': 0,
        }

    def generate_results(self):
        self.results['total_time_seconds'] = pstats.Stats(self.prof).total_tt

    def output_results(self):
        super().output_results()

        filename_base = '%.3fs-%s-%s-%s' % (self.results['total_time_seconds'], self.name, self.pid, uuid.uuid4())
        pstats_filepath = os.path.join(self.dest, f"{filename_base}.pstats")
        extra_data = ""

        if self.dot_enabled:
            try:
                from gprof2dot import main as generate_dot
            except ImportError:
                extra_data = 'Dot graph generation failed due to package "gprof2dot" being unavailable.'
            else:
                raw_filepath = os.path.join(self.dest, f"{filename_base}.raw")
                dot_filepath = os.path.join(self.dest, f"{filename_base}.dot")

                pstats.Stats(self.prof).dump_stats(raw_filepath)
                generate_dot([
                    '-n', '2.5', '-f', 'pstats', '-o',
                    dot_filepath,
                    raw_filepath
                ])
                os.remove(raw_filepath)

        with open(pstats_filepath, 'w') as f:
            print(f"{self.name}, {extra_data}", file=f)
            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
        return pstats_filepath

    def start(self):
        self.prof = cProfile.Profile()
        self.pid = os.getpid()

        self.prof.enable()
        self.started = True

    def is_started(self):
        return self.started

    def stop(self):
        if self.started:
            self.prof.disable()

            self.generate_results()
            res = self.output_results()
            self.started = False
            return res
        else:
            print("AWXProfiler::stop() called without calling start() first", file=sys.stderr)
            return None


def profile(name, *init_args, **init_kwargs):
    def decorator_profile(func):
        @functools.wraps(func)
        def wrapper_profile(*args, **kwargs):
            prof = AWXProfiler(name, *init_args, **init_kwargs)
            prof.start()
            res = func(*args, **kwargs)
            prof.stop()
            return res
        return wrapper_profile
    return decorator_profile
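A short usage sketch for the timing/profile decorators defined above; the function bodies and dest overrides are illustrative, and the import path follows the file header.

from awx.main.utils.profiling import profile, timing

@timing('expensive-loop', dest='/tmp/awx-timing')  # writes <diff>-seconds-expensive-loop-<uuid>.time under dest
def expensive_loop():
    return sum(i * i for i in range(10 ** 6))

@profile('expensive-loop', dest='/tmp/awx-profile', dot_enabled=False)  # writes a .pstats report under dest
def expensive_loop_profiled():
    return sum(i * i for i in range(10 ** 6))

expensive_loop()
expensive_loop_profiled()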
@@ -1,15 +0,0 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.action import ActionBase


class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
        self._supports_check_mode = False
        result = super(ActionModule, self).run(tmp, task_vars)
        result['changed'] = result['failed'] = False
        result['msg'] = ''
        self._display.deprecated("Mercurial support is deprecated")
        return result
@@ -9,6 +9,9 @@
     - ansible.posix

   tasks:
+    - name: "Output job the playbook is running for"
+      debug:
+        msg: "Checking on job {{ job_id }}"

     - name: Determine if daemon process is alive.
       shell: "ansible-runner is-alive {{src}}"
@@ -48,12 +48,6 @@
     tags:
       - update_git

-  - block:
-      - name: include hg tasks
-        include_tasks: project_update_hg_tasks.yml
-    tags:
-      - update_hg
-
   - block:
       - name: update project using svn
         subversion:
@@ -150,7 +144,6 @@
         msg: "Repository Version {{ scm_version }}"
       tags:
         - update_git
-        - update_hg
         - update_svn
         - update_insights
         - update_archive
@@ -159,23 +152,29 @@
   gather_facts: false
   connection: local
   name: Install content with ansible-galaxy command if necessary
+  vars:
+    yaml_exts:
+      - {ext: .yml}
+      - {ext: .yaml}
   tasks:

     - block:
-        - name: detect requirements.yml
+        - name: detect roles/requirements.(yml/yaml)
           stat:
-            path: '{{project_path|quote}}/roles/requirements.yml'
+            path: "{{project_path|quote}}/roles/requirements{{ item.ext }}"
+          with_items: "{{ yaml_exts }}"
           register: doesRequirementsExist

-        - name: fetch galaxy roles from requirements.yml
+        - name: fetch galaxy roles from requirements.(yml/yaml)
           command: >
-            ansible-galaxy role install -r roles/requirements.yml
+            ansible-galaxy role install -r {{ item.stat.path }}
             --roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles
             {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
           args:
             chdir: "{{project_path|quote}}"
           register: galaxy_result
-          when: doesRequirementsExist.stat.exists
+          with_items: "{{ doesRequirementsExist.results }}"
+          when: item.stat.exists
           changed_when: "'was installed successfully' in galaxy_result.stdout"
           environment:
             ANSIBLE_FORCE_COLOR: false
@@ -186,20 +185,22 @@
           - install_roles

     - block:
-        - name: detect collections/requirements.yml
+        - name: detect collections/requirements.(yml/yaml)
           stat:
-            path: '{{project_path|quote}}/collections/requirements.yml'
+            path: "{{project_path|quote}}/collections/requirements{{ item.ext }}"
+          with_items: "{{ yaml_exts }}"
           register: doesCollectionRequirementsExist

-        - name: fetch galaxy collections from collections/requirements.yml
+        - name: fetch galaxy collections from collections/requirements.(yml/yaml)
           command: >
-            ansible-galaxy collection install -r collections/requirements.yml
+            ansible-galaxy collection install -r {{ item.stat.path }}
             --collections-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections
             {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
           args:
             chdir: "{{project_path|quote}}"
           register: galaxy_collection_result
-          when: doesCollectionRequirementsExist.stat.exists
+          with_items: "{{ doesCollectionRequirementsExist.results }}"
+          when: item.stat.exists
           changed_when: "'Installing ' in galaxy_collection_result.stdout"
           environment:
             ANSIBLE_FORCE_COLOR: false
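To make the change above concrete, here is a hedged plain-Python rendering of what the yaml_exts/with_items loop does; the project path is hypothetical and the logic is a paraphrase of the playbook, not AWX code.

# Illustration only: mirrors the playbook's stat + with_items behavior.
import os

yaml_exts = ['.yml', '.yaml']
project_path = '/var/lib/awx/projects/_6__demo'  # hypothetical project checkout
for kind in ('roles', 'collections'):
    for ext in yaml_exts:
        candidate = f'{project_path}/{kind}/requirements{ext}'
        if os.path.exists(candidate):
            # the playbook then runs: ansible-galaxy {role|collection} install -r <candidate>
            print('would install from', candidate)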
@@ -1,20 +0,0 @@
---
- name: Mercurial support is deprecated.
  hg_deprecation:

- name: update project using hg
  hg:
    dest: "{{project_path|quote}}"
    repo: "{{scm_url|quote}}"
    revision: "{{scm_branch|quote}}"
    force: "{{scm_clean}}"
  register: hg_result

- name: Set the hg repository version
  set_fact:
    scm_version: "{{ hg_result['after'] }}"
  when: "'after' in hg_result"

- name: parse hg version string properly
  set_fact:
    scm_version: "{{scm_version|regex_replace('^([A-Za-z0-9]+).*$', '\\1')}}"
@@ -13,6 +13,10 @@
     - ansible.posix

   tasks:
+    - name: "Output job the playbook is running for"
+      debug:
+        msg: "Checking on job {{ job_id }}"
+
     - name: synchronize job environment with isolated host
       synchronize:
         copy_links: true
@@ -91,7 +91,6 @@ USE_L10N = True
 USE_TZ = True

 STATICFILES_DIRS = (
     os.path.join(BASE_DIR, 'ui', 'static'),
-    os.path.join(BASE_DIR, 'ui_next', 'build', 'static'),
     os.path.join(BASE_DIR, 'static'),
 )
@@ -117,7 +116,7 @@ LOGIN_URL = '/api/login/'

 # Absolute filesystem path to the directory to host projects (with playbooks).
 # This directory should not be web-accessible.
-PROJECTS_ROOT = os.path.join(BASE_DIR, 'projects')
+PROJECTS_ROOT = '/var/lib/awx/projects/'

 # Absolute filesystem path to the directory to host collections for
 # running inventory imports, isolated playbooks
@@ -126,10 +125,10 @@ AWX_ANSIBLE_COLLECTIONS_PATHS = os.path.join(BASE_DIR, 'vendor', 'awx_ansible_co
 # Absolute filesystem path to the directory for job status stdout (default for
 # development and tests, default for production defined in production.py). This
 # directory should not be web-accessible
-JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_output')
+JOBOUTPUT_ROOT = '/var/lib/awx/job_status/'

 # Absolute filesystem path to the directory to store logs
-LOG_ROOT = os.path.join(BASE_DIR)
+LOG_ROOT = '/var/log/tower/'

 # The heartbeat file for the tower scheduler
 SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.tower_cycle')
@@ -197,9 +196,9 @@ LOCAL_STDOUT_EXPIRE_TIME = 2592000
 # events into the database
 JOB_EVENT_WORKERS = 4

-# The number of seconds (must be an integer) to buffer callback receiver bulk
+# The number of seconds to buffer callback receiver bulk
 # writes in memory before flushing via JobEvent.objects.bulk_create()
-JOB_EVENT_BUFFER_SECONDS = 1
+JOB_EVENT_BUFFER_SECONDS = .1

 # The interval at which callback receiver statistics should be
 # recorded
@@ -249,8 +248,7 @@ TEMPLATES = [
                 'django.template.context_processors.static',
                 'django.template.context_processors.tz',
                 'django.contrib.messages.context_processors.messages',
-                'awx.ui.context_processors.settings',
-                'awx.ui.context_processors.version',
+                'awx.ui.context_processors.csp',
                 'social_django.context_processors.backends',
                 'social_django.context_processors.login_redirect',
             ],
@@ -664,7 +662,7 @@ INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM")
 # ----------------
 EC2_ENABLED_VAR = 'ec2_state'
 EC2_ENABLED_VALUE = 'running'
-EC2_INSTANCE_ID_VAR = 'ec2_id'
+EC2_INSTANCE_ID_VAR = 'instance_id'
 EC2_EXCLUDE_EMPTY_GROUPS = True

 # ------------
@@ -934,6 +932,14 @@ LOGGING = {
             'backupCount': 5,
             'formatter':'simple',
         },
+        'isolated_manager': {
+            'level': 'WARNING',
+            'class':'logging.handlers.RotatingFileHandler',
+            'filename': os.path.join(LOG_ROOT, 'isolated_manager.log'),
+            'maxBytes': 1024 * 1024 * 5,  # 5 MB
+            'backupCount': 5,
+            'formatter':'simple',
+        },
     },
     'loggers': {
         'django': {
@@ -983,6 +989,11 @@ LOGGING = {
         'awx.main.wsbroadcast': {
             'handlers': ['wsbroadcast'],
         },
+        'awx.isolated.manager': {
+            'level': 'WARNING',
+            'handlers': ['console', 'file', 'isolated_manager'],
+            'propagate': True
+        },
         'awx.isolated.manager.playbooks': {
             'handlers': ['management_playbooks'],
             'propagate': False
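The two LOGGING additions above wire isolated-node management messages at WARNING and up to the console, the main file handler, and isolated_manager.log. A minimal sketch of emitting to that logger (the message text is illustrative):

import logging

# 'awx.isolated.manager' is the logger name configured above.
logger = logging.getLogger('awx.isolated.manager')
logger.warning('lost heartbeat for isolated instance %s', 'iso-node-1')  # illustrative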
@@ -148,9 +148,9 @@ include(optional('/etc/tower/settings.py'), scope=locals())
 include(optional('/etc/tower/conf.d/*.py'), scope=locals())

 # Installed differently in Dockerfile compared to production versions
-AWX_ANSIBLE_COLLECTIONS_PATHS = '/vendor/awx_ansible_collections'
+AWX_ANSIBLE_COLLECTIONS_PATHS = '/var/lib/awx/vendor/awx_ansible_collections'

-BASE_VENV_PATH = "/venv/"
+BASE_VENV_PATH = "/var/lib/awx/venv/"
 ANSIBLE_VENV_PATH = os.path.join(BASE_VENV_PATH, "ansible")
 AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")

@@ -184,3 +184,6 @@ else:
     pass

 AWX_CALLBACK_PROFILE = True
+
+if 'sqlite3' not in DATABASES['default']['ENGINE']:  # noqa
+    DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63])  # noqa
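The application_name default added above tags each Postgres connection with the cluster host id, process id, and command line, clipped to Postgres's 63-character limit on that field. A hedged standalone rendering (the host id value is hypothetical):

import os
import sys

CLUSTER_HOST_ID = 'awx-1'  # hypothetical; AWX derives this from cluster settings
app_name = f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
# Postgres truncates application_name to 63 bytes, hence the [:63] slice.
print(app_name)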
Some files were not shown because too many files have changed in this diff.