Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 11:34:43 -03:30)

Compare commits (1144 commits)
.gitignore (vendored, 5 lines changed)

```diff
@@ -1,3 +1,7 @@
+# Ignore generated schema
+swagger.json
+schema.json
+reference-schema.json
 
 # Tags
 .tags
@@ -58,6 +62,7 @@ __pycache__
 # UI build flag files
 awx/ui/.deps_built
 awx/ui/.release_built
+awx/ui/.release_deps_built
 
 # Testing
 .cache
```
CONTRIBUTING.md

````diff
@@ -34,10 +34,11 @@ Have questions about this document or anything not covered here? Come chat with
 ## Things to know prior to submitting code
 
 - All code submissions are done through pull requests against the `devel` branch.
 - You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
 - Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
+- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
 - If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
 - We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
 
 ## Setting up your development environment
 
@@ -49,7 +50,7 @@ The AWX development environment workflow and toolchain is based on Docker, and t
 
 Prior to starting the development services, you'll need `docker` and `docker-compose`. On Linux, you can generally find these in your distro's packaging, but you may find that Docker themselves maintain a separate repo that tracks more closely to the latest releases.
 
 For macOS and Windows, we recommend [Docker for Mac](https://www.docker.com/docker-mac) and [Docker for Windows](https://www.docker.com/docker-windows)
 respectively.
 
 For Linux platforms, refer to the following from Docker:
@@ -82,12 +83,10 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
 (host)$ pip install docker-compose
 ```
 
-#### Node and npm
+#### Frontend Development
 
-The AWX UI requires the following:
+See [the ui development documentation](awx/ui/README.md).
 
-- Node 8.x LTS
-- NPM 6.x LTS
-
 ### Build the environment
 
@@ -137,21 +136,21 @@ Run the following to build the AWX UI:
 ```
 ### Running the environment
 
 #### Start the containers
 
 Start the development containers by running the following:
 
 ```bash
 (host)$ make docker-compose
 ```
 
 The above utilizes the image built in the previous step, and will automatically start all required services and dependent containers. Once the containers launch, your session will be attached to the *awx* container, and you'll be able to watch log messages and events in real time. You will see messages from Django and the front end build process.
 
 If you start a second terminal session, you can take a look at the running containers using the `docker ps` command. For example:
 
 ```bash
 # List running containers
 (host)$ docker ps
 
 $ docker ps
 CONTAINER ID        IMAGE               COMMAND             CREATED             STATUS              PORTS               NAMES
@@ -219,7 +218,7 @@ If you want to start and use the development environment, you'll first need to b
 ```
 
 The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.
 
 Now you can start each service individually, or start all services in a pre-configured tmux session like so:
 
 ```bash
@@ -248,9 +247,9 @@ Before you can log into AWX, you need to create an admin user. With this user yo
 (container)# awx-manage createsuperuser
 ```
 You will be prompted for a username, an email address, and a password, and you will be asked to confirm the password. The email address is not important, so just enter something that looks like an email address. Remember the username and password, as you will use them to log into the web interface for the first time.
 
 ##### Load demo data
 
 You can optionally load some demo data. This will create a demo project, inventory, and job template. From within the container shell, run the following to load the data:
 
 ```bash
@@ -276,7 +275,7 @@ in OpenAPI format. A variety of online tools are available for translating
 this data into more consumable formats (such as HTML). http://editor.swagger.io
 is an example of one such service.
 
 ### Accessing the AWX web interface
 
 You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).
 
@@ -289,7 +288,7 @@ When necessary, remove any AWX containers and images by running the following:
 ```bash
 (host)$ make docker-clean
 ```
 
 ## What should I work on?
 
 For feature work, take a look at the current [Enhancements](https://github.com/ansible/awx/issues?q=is%3Aissue+is%3Aopen+label%3Atype%3Aenhancement).
@@ -331,6 +330,23 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 
 All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.
 
+## PR Checks ran by Zuul
+Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.
+
+Zuul runs the following checks that must pass:
+1) `tox-awx-api-lint`
+2) `tox-awx-ui-lint`
+3) `tox-awx-api`
+4) `tox-awx-ui`
+5) `tox-awx-swagger`
+
+Zuul runs the following checks that are non-voting (can not pass but serve to inform PR reviewers):
+1) `tox-awx-detect-schema-change`
+This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
+Reviewers should inspect the `job-output.txt.gz` related to the check if their is a failure (grep for `diff -u -b` to find beginning of diff).
+If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
+If not, the schema changes should be fixed, but this decision must be enforced by reviewers.
+
 ## Reporting Issues
 
 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
````
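The non-voting schema check described above amounts to a whitespace-insensitive diff between a freshly generated `schema.json` and the `devel` reference copy (`diff -u -b` in the Makefile targets further down). A minimal Python sketch of that comparison, assuming the two files are already present locally:

```python
import difflib

def schema_drift(reference_path="reference-schema.json", generated_path="schema.json"):
    """Unified diff of two schema files, ignoring whitespace-only changes
    (the moral equivalent of `diff -u -b`)."""
    def normalized(path):
        with open(path) as f:
            # Collapsing runs of whitespace makes indentation-only edits vanish.
            return [" ".join(line.split()) + "\n" for line in f]
    return list(difflib.unified_diff(
        normalized(reference_path), normalized(generated_path),
        fromfile=reference_path, tofile=generated_path,
    ))

if __name__ == "__main__":
    drift = schema_drift()
    print("no schema change detected" if not drift else "".join(drift))
```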
````diff
@@ -18,7 +18,7 @@ $ pip install --upgrade ansible-tower-cli
 
 The AWX host URL, user, and password must be set for the AWX instance to be exported:
 ```
-$ tower-cli config host <old-awx-host.example.com>
+$ tower-cli config host http://<old-awx-host.example.com>
 $ tower-cli config username <user>
 $ tower-cli config password <pass>
 ```
@@ -62,7 +62,7 @@ For other install methods, refer to the [Install.md](https://github.com/ansible/
 Configure tower-cli for your new AWX host as shown earlier. Import from a JSON file named assets.json
 
 ```
-$ tower-cli config host <new-awx-host.example.com>
+$ tower-cli config host http://<new-awx-host.example.com>
 $ tower-cli config username <user>
 $ tower-cli config password <pass>
 $ tower-cli send assets.json
````
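Since the export/import procedure above is a fixed sequence of CLI calls, it is easy to script. A sketch driving `tower-cli` through `subprocess` (the host and credential values are placeholders):

```python
import subprocess

def configure_tower_cli(host, username, password):
    # The three `tower-cli config` commands from the steps above.
    for key, value in (("host", host), ("username", username), ("password", password)):
        subprocess.run(["tower-cli", "config", key, value], check=True)

def import_assets(path="assets.json"):
    # Equivalent of `tower-cli send assets.json` against the configured host.
    subprocess.run(["tower-cli", "send", path], check=True)

if __name__ == "__main__":
    configure_tower_cli("http://new-awx-host.example.com", "admin", "secret")
    import_assets()
```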
INSTALL.md (21 lines changed)

````diff
@@ -27,7 +27,7 @@ This document provides a guide for installing AWX.
 - [Start the build](#start-the-build-1)
 - [Accessing AWX](#accessing-awx-1)
 - [SSL Termination](#ssl-termination)
-- [Docker or Docker Compose](#docker-or-docker-compose)
+- [Docker Compose](#docker-compose)
 - [Prerequisites](#prerequisites-3)
 - [Pre-build steps](#pre-build-steps-2)
 - [Deploying to a remote host](#deploying-to-a-remote-host)
@@ -73,7 +73,7 @@ The system that runs the AWX service will need to satisfy the following requirem
 - At least 2 cpu cores
 - At least 20GB of space
 - Running Docker, Openshift, or Kubernetes
-- If you choose to use an external PostgreSQL database, please note that the minimum version is 9.4.
+- If you choose to use an external PostgreSQL database, please note that the minimum version is 9.6+.
 
 ### AWX Tunables
 
@@ -81,14 +81,14 @@ The system that runs the AWX service will need to satisfy the following requirem
 
 ### Choose a deployment platform
 
-We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, docker-compose or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
+We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster or docker-compose. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
 
 The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
 
 In the sections below, you'll find deployment details and instructions for each platform:
 - [OpenShift](#openshift)
 - [Kubernetes](#kubernetes)
-- [Docker or Docker Compose](#docker-or-docker-compose).
+- [Docker Compose](#docker-compose).
 
 ### Official vs Building Images
 
@@ -391,14 +391,13 @@ If your provider is able to allocate an IP Address from the Ingress controller t
 Unlike Openshift's `Route` the Kubernetes `Ingress` doesn't yet handle SSL termination. As such the default configuration will only expose AWX through HTTP on port 80. You are responsible for configuring SSL support until support is added (either to Kubernetes or AWX itself).
 
 
-## Docker or Docker-Compose
+## Docker-Compose
 
 ### Prerequisites
 
 - [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the ``docker`` group, refer to the documentation for details)
 - [docker-py](https://github.com/docker/docker-py) Python module.
 
-If you're installing using Docker Compose, you'll need [Docker Compose](https://docs.docker.com/compose/install/).
+- [Docker Compose](https://docs.docker.com/compose/install/).
 
 ### Pre-build steps
 
@@ -441,13 +440,13 @@ Before starting the build process, review the [inventory](./installer/inventory)
 
 > Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.
 
-*use_docker_compose*
+*ssl_certificate*
 
-> Switch to ``true`` to use Docker Compose instead of the standalone Docker install.
+> Optionally, provide the path to a file that contains a certificate and its private key.
 
 *docker_compose_dir*
 
-When using docker-compose, the `docker-compose.yml` file will be created there (default `/var/lib/awx`).
+When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
 
 *ca_trust_dir*
 
@@ -527,7 +526,7 @@ After the playbook run completes, Docker will report up to 5 running containers.
 ```bash
 CONTAINER ID        IMAGE               COMMAND                  CREATED         STATUS              PORTS                                NAMES
 e240ed8209cd        awx_task:1.0.0.8    "/tini -- /bin/sh ..."   2 minutes ago   Up About a minute   8052/tcp                             awx_task
-1cfd02601690        awx_web:1.0.0.8     "/tini -- /bin/sh ..."   2 minutes ago   Up About a minute   0.0.0.0:80->8052/tcp                 awx_web
+1cfd02601690        awx_web:1.0.0.8     "/tini -- /bin/sh ..."   2 minutes ago   Up About a minute   0.0.0.0:443->8052/tcp                awx_web
 55a552142bcd        memcached:alpine    "docker-entrypoint..."   2 minutes ago   Up 2 minutes        11211/tcp                            memcached
 84011c072aad        rabbitmq:3          "docker-entrypoint..."   2 minutes ago   Up 2 minutes        4369/tcp, 5671-5672/tcp, 25672/tcp   rabbitmq
 97e196120ab3        postgres:9.6        "docker-entrypoint..."   2 minutes ago   Up 2 minutes        5432/tcp                             postgres
````
Makefile (109 lines changed)

```diff
@@ -1,4 +1,4 @@
-PYTHON ?= python
+PYTHON ?= python3
 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
 SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no
@@ -11,7 +11,6 @@ GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 IMAGE_REPOSITORY_AUTH ?=
-IMAGE_REPOSITORY_BASE ?= https://gcr.io
 
 VERSION := $(shell cat VERSION)
 
 # NOTE: This defaults the container image version to the branch that's active
@@ -54,6 +53,7 @@ WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl
 
 # UI flag files
 UI_DEPS_FLAG_FILE = awx/ui/.deps_built
+UI_RELEASE_DEPS_FLAG_FILE = awx/ui/.release_deps_built
 UI_RELEASE_FLAG_FILE = awx/ui/.release_built
 
 I18N_FLAG_FILE = .i18n_built
@@ -74,6 +74,7 @@ clean-ui:
     rm -rf awx/ui/test/e2e/reports/
    rm -rf awx/ui/client/languages/
    rm -f $(UI_DEPS_FLAG_FILE)
+   rm -f $(UI_RELEASE_DEPS_FLAG_FILE)
    rm -f $(UI_RELEASE_FLAG_FILE)
 
 clean-tmp:
@@ -85,6 +86,11 @@ clean-venv:
 clean-dist:
    rm -rf dist
 
+clean-schema:
+   rm -rf swagger.json
+   rm -rf schema.json
+   rm -rf reference-schema.json
+
 # Remove temporary build files, compiled Python files.
 clean: clean-ui clean-dist
    rm -rf awx/public
@@ -116,23 +122,31 @@ virtualenv_ansible:
        mkdir $(VENV_BASE); \
    fi; \
    if [ ! -d "$(VENV_BASE)/ansible" ]; then \
-       virtualenv --system-site-packages $(VENV_BASE)/ansible && \
+       virtualenv -p python --system-site-packages $(VENV_BASE)/ansible && \
        $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
        $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
        $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
    fi; \
    fi
 
+virtualenv_ansible_py3:
+   if [ "$(VENV_BASE)" ]; then \
+       if [ ! -d "$(VENV_BASE)" ]; then \
+           mkdir $(VENV_BASE); \
+       fi; \
+       if [ ! -d "$(VENV_BASE)/ansible" ]; then \
+           $(PYTHON) -m venv --system-site-packages $(VENV_BASE)/ansible; \
+       fi; \
+   fi
+
 virtualenv_awx:
    if [ "$(VENV_BASE)" ]; then \
        if [ ! -d "$(VENV_BASE)" ]; then \
            mkdir $(VENV_BASE); \
        fi; \
        if [ ! -d "$(VENV_BASE)/awx" ]; then \
-           virtualenv --system-site-packages $(VENV_BASE)/awx && \
-           $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-           $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
-           $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
+           $(PYTHON) -m venv --system-site-packages $(VENV_BASE)/awx; \
+           $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed docutils==0.14; \
    fi; \
    fi
```
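The new `virtualenv_ansible_py3` and `virtualenv_awx` recipes above switch from the `virtualenv` tool to the stdlib `venv` module with `--system-site-packages`. A short Python equivalent of that invocation (the target path is illustrative):

```python
import venv

# Same effect as `$(PYTHON) -m venv --system-site-packages $(VENV_BASE)/ansible`:
# the new interpreter can also import packages installed system-wide, so the
# recipes do not need to re-install distro-provided dependencies.
builder = venv.EnvBuilder(system_site_packages=True, with_pip=True)
builder.create("/tmp/venv-ansible")  # illustrative path
```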
```diff
@@ -144,20 +158,19 @@ requirements_ansible: virtualenv_ansible
    fi
    $(VENV_BASE)/ansible/bin/pip uninstall --yes -r requirements/requirements_ansible_uninstall.txt
 
+requirements_ansible_py3: virtualenv_ansible_py3
+   if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
+       cat requirements/requirements_ansible.txt requirements/requirements_ansible_local.txt | $(VENV_BASE)/ansible/bin/pip3 install $(PIP_OPTIONS) --ignore-installed -r /dev/stdin ; \
+   else \
+       cat requirements/requirements_ansible.txt requirements/requirements_ansible_git.txt | $(VENV_BASE)/ansible/bin/pip3 install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
+   fi
+   $(VENV_BASE)/ansible/bin/pip3 uninstall --yes -r requirements/requirements_ansible_uninstall.txt
+
 requirements_ansible_dev:
    if [ "$(VENV_BASE)" ]; then \
        $(VENV_BASE)/ansible/bin/pip install pytest mock; \
    fi
 
 requirements_isolated:
    if [ ! -d "$(VENV_BASE)/awx" ]; then \
-       virtualenv --system-site-packages $(VENV_BASE)/awx && \
-       $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-       $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==35.0.2 && \
-       $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
    fi;
    $(VENV_BASE)/awx/bin/pip install -r requirements/requirements_isolated.txt
 
@@ -165,6 +178,7 @@ requirements_awx: virtualenv_awx
    else \
        cat requirements/requirements.txt requirements/requirements_git.txt | $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
    fi
+   echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
    #$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt
 
 requirements_awx_dev:
@@ -191,7 +205,7 @@ version_file:
    if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
-   python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \
+   python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \
 
 # Do any one-time init tasks.
 comma := ,
@@ -204,7 +218,7 @@ init:
    if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
        $(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
        $(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
-       $(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat >> /root/.ssh/authorized_keys'; \
+       $(MANAGEMENT_COMMAND) generate_isolated_key > /awx_devel/awx/main/expect/authorized_keys; \
    fi;
 
 # Refresh development environment after pulling new code.
@@ -255,7 +269,7 @@ supervisor:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
-   supervisord --configuration /supervisor.conf --pidfile=/tmp/supervisor_pid
+   supervisord --pidfile=/tmp/supervisor_pid
 
 # Alternate approach to tmux to run all development tasks specified in
 # Procfile.
@@ -338,18 +352,21 @@ pyflakes: reports
 pylint: reports
    @(set -o pipefail && $@ | reports/$@.report)
 
+genschema: reports
+   $(MAKE) swagger PYTEST_ARGS="--genschema"
+
 swagger: reports
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
-   (set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+   (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
 
 check: flake8 pep8 # pyflakes pylint
 
 awx-link:
    cp -R /tmp/awx.egg-info /awx_devel/ || true
    sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
-   cp -f /tmp/awx.egg-link /venv/awx/lib/python2.7/site-packages/awx.egg-link
+   cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
 
 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
 
@@ -446,7 +463,7 @@ messages:
 # generate l10n .json .mo
 languages: $(I18N_FLAG_FILE)
 
-$(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
+$(I18N_FLAG_FILE): $(UI_RELEASE_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run languages
    $(PYTHON) tools/scripts/compilemessages.py
    touch $(I18N_FLAG_FILE)
```
```diff
@@ -454,13 +471,31 @@ $(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
 # End l10n TASKS
 # --------------------------------------
 
-# UI TASKS
+# UI RELEASE TASKS
 # --------------------------------------
+ui-release: $(UI_RELEASE_FLAG_FILE)
+
+$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_RELEASE_DEPS_FLAG_FILE)
+   $(NPM_BIN) --prefix awx/ui run build-release
+   touch $(UI_RELEASE_FLAG_FILE)
+
+$(UI_RELEASE_DEPS_FLAG_FILE):
+   PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
+   touch $(UI_RELEASE_DEPS_FLAG_FILE)
+
+# END UI RELEASE TASKS
+# --------------------------------------
+
+# UI TASKS
+# --------------------------------------
 ui-deps: $(UI_DEPS_FLAG_FILE)
 
 $(UI_DEPS_FLAG_FILE):
-   $(NPM_BIN) --unsafe-perm --prefix awx/ui install --no-save awx/ui
+   @if [ -f ${UI_RELEASE_DEPS_FLAG_FILE} ]; then \
+       rm -rf awx/ui/node_modules; \
+       rm -f ${UI_RELEASE_DEPS_FLAG_FILE}; \
+   fi; \
+   $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
    touch $(UI_DEPS_FLAG_FILE)
 
 ui-docker-machine: $(UI_DEPS_FLAG_FILE)
@@ -474,12 +509,6 @@ ui-docker: $(UI_DEPS_FLAG_FILE)
 ui-devel: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run build-devel -- $(MAKEFLAGS)
 
-ui-release: $(UI_RELEASE_FLAG_FILE)
-
-$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_DEPS_FLAG_FILE)
-   $(NPM_BIN) --prefix awx/ui run build-release
-   touch $(UI_RELEASE_FLAG_FILE)
-
 ui-test: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run test
 
@@ -490,9 +519,6 @@ ui-test-ci: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run test:ci
    $(NPM_BIN) --prefix awx/ui run unit
 
-testjs_ci:
-   echo "Update UI unittests later" #ui-test-ci
-
 jshint: $(UI_DEPS_FLAG_FILE)
-   $(NPM_BIN) run --prefix awx/ui jshint
+   $(NPM_BIN) run --prefix awx/ui lint
@@ -540,12 +566,6 @@ docker-isolated:
    TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
    docker start tools_awx_1
    docker start tools_isolated_1
-   echo "__version__ = '`git describe --long | cut -d - -f 1-1`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python2.7/site-packages/awx.py"
-   if [ "`docker exec -i -t tools_isolated_1 cat /root/.ssh/authorized_keys`" == "`docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub`" ]; then \
-       echo "SSH keys already copied to isolated instance"; \
-   else \
-       docker exec "tools_isolated_1" bash -c "mkdir -p /root/.ssh && rm -f /root/.ssh/authorized_keys && echo $$(docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub) >> /root/.ssh/authorized_keys"; \
-   fi
    CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up
 
 # Docker Compose Development environment
@@ -564,6 +584,16 @@ docker-compose-runtest:
 docker-compose-build-swagger:
    cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh swagger
 
+docker-compose-genschema:
+   cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh genschema
+   mv swagger.json schema.json
+
+docker-compose-detect-schema-change:
+   $(MAKE) docker-compose-genschema
+   curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
+   # Ignore differences in whitespace with -b
+   diff -u -b reference-schema.json schema.json
+
 docker-compose-clean:
    cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
    cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf
@@ -600,7 +630,6 @@ docker-compose-cluster-elk: docker-auth
 minishift-dev:
    ansible-playbook -i localhost, -e devtree_directory=$(CURDIR) tools/clusterdevel/start_minishift_dev.yml
 
-
 clean-elk:
    docker stop tools_kibana_1
    docker stop tools_logstash_1
```
awx/__init__.py

```diff
@@ -21,6 +21,48 @@ except ImportError: # pragma: no cover
 MODE = 'production'
 
 
+import hashlib
+
+try:
+    import django
+    from django.utils.encoding import force_bytes
+    from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+    from django.db.backends.base import schema
+    HAS_DJANGO = True
+except ImportError:
+    HAS_DJANGO = False
+
+
+if HAS_DJANGO is True:
+    # This line exists to make sure we don't regress on FIPS support if we
+    # upgrade Django; if you're upgrading Django and see this error,
+    # update the version check below, and confirm that FIPS still works.
+    if django.__version__ != '1.11.16':
+        raise RuntimeError("Django version other than 1.11.16 detected {}. \
+Subclassing BaseDatabaseSchemaEditor is known to work for Django 1.11.16 \
+and may not work in newer Django versions.".format(django.__version__))
+
+
+    class FipsBaseDatabaseSchemaEditor(BaseDatabaseSchemaEditor):
+
+        @classmethod
+        def _digest(cls, *args):
+            """
+            Generates a 32-bit digest of a set of arguments that can be used to
+            shorten identifying names.
+            """
+            try:
+                h = hashlib.md5()
+            except ValueError:
+                h = hashlib.md5(usedforsecurity=False)
+            for arg in args:
+                h.update(force_bytes(arg))
+            return h.hexdigest()[:8]
+
+
+    schema.BaseDatabaseSchemaEditor = FipsBaseDatabaseSchemaEditor
+
+
 def find_commands(management_dir):
     # Modified version of function from django/core/management/__init__.py.
     command_dir = os.path.join(management_dir, 'commands')
@@ -49,16 +91,6 @@ def prepare_env():
     # Monkeypatch Django find_commands to also work with .pyc files.
     import django.core.management
     django.core.management.find_commands = find_commands
-    # Fixup sys.modules reference to django.utils.six to allow jsonfield to
-    # work when using Django 1.4.
-    import django.utils
-    try:
-        import django.utils.six
-    except ImportError:  # pragma: no cover
-        import six
-        sys.modules['django.utils.six'] = sys.modules['six']
-        django.utils.six = sys.modules['django.utils.six']
-    from django.utils import six  # noqa
     # Use the AWX_TEST_DATABASE_* environment variables to specify the test
     # database settings to use when management command is run as an external
     # program via unit tests.
```
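For context on the `_digest` override above: on a FIPS-enforcing host, OpenSSL disables MD5 for security use, so a plain `hashlib.md5()` raises `ValueError`; distro-patched interpreters (and stock CPython 3.9+) accept `usedforsecurity=False` to permit MD5 for non-cryptographic purposes such as Django's identifier shortening. The same fallback as a standalone sketch:

```python
import hashlib

def short_id(*parts):
    """Non-cryptographic 8-hex-char digest, mirroring the fallback above."""
    try:
        h = hashlib.md5()
    except ValueError:
        # FIPS mode: declare the non-security use so the call is permitted
        # (works on FIPS-patched builds and on CPython 3.9+).
        h = hashlib.md5(usedforsecurity=False)
    for part in parts:
        h.update(str(part).encode("utf-8"))
    return h.hexdigest()[:8]

print(short_id("myapp_table", "col_a"))  # e.g. a shortened index-name suffix
```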
```diff
@@ -43,7 +43,7 @@ register(
     help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
                 '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
                 'of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
-                'authorization grants in the number of seconds.'),
+                'authorization codes in the number of seconds.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
```
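The registered setting is a dictionary keyed by the two names in the help text. Assuming it is the django-oauth-toolkit style `OAUTH2_PROVIDER` dict (an inference; the hunk does not show the setting name), a configuration would look like this, with placeholder durations:

```python
# Both key names come from the help text above; the values are hypothetical.
OAUTH2_PROVIDER = {
    'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000,    # access tokens: one year
    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,   # authorization codes: ten minutes
}
```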
```diff
@@ -101,6 +101,10 @@ class DeprecatedCredentialField(serializers.IntegerField):
         super(DeprecatedCredentialField, self).__init__(**kwargs)
 
     def to_internal_value(self, pk):
+        try:
+            pk = int(pk)
+        except ValueError:
+            self.fail('invalid')
         try:
             Credential.objects.get(pk=pk)
         except ObjectDoesNotExist:
```
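The added guard coerces the incoming primary key to `int` before the database lookup, so a non-numeric value becomes a clean validation error rather than an unhandled exception. The same pattern in a minimal DRF field (a hypothetical illustration, not the AWX class; assumes `djangorestframework` is installed):

```python
from rest_framework import serializers

class StrictPKField(serializers.IntegerField):
    """Reject non-integer pks with the field's standard 'invalid' error."""

    def to_internal_value(self, data):
        try:
            data = int(data)
        except (TypeError, ValueError):
            # Raises serializers.ValidationError rendered from
            # default_error_messages['invalid'].
            self.fail('invalid')
        return super(StrictPKField, self).to_internal_value(data)
```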
awx/api/filters.py

```diff
@@ -25,7 +25,6 @@ from rest_framework.filters import BaseFilterBackend
 from awx.main.utils import get_type_for_model, to_python_boolean
 from awx.main.utils.db import get_all_field_names
-from awx.main.models.credential import CredentialType
 from awx.main.models.rbac import RoleAncestorEntry
 
 
 class V1CredentialFilterBackend(BaseFilterBackend):
@@ -66,7 +65,7 @@ class TypeFilterBackend(BaseFilterBackend):
         model = queryset.model
         model_type = get_type_for_model(model)
         if 'polymorphic_ctype' in get_all_field_names(model):
-            types_pks = set([v for k,v in types_map.items() if k in types])
+            types_pks = set([v for k, v in types_map.items() if k in types])
             queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
         elif model_type in types:
             queryset = queryset
@@ -193,7 +192,7 @@ class FieldLookupBackend(BaseFilterBackend):
 
     def value_to_python(self, model, lookup, value):
         try:
-            lookup = lookup.encode("ascii")
+            lookup.encode("ascii")
         except UnicodeEncodeError:
             raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
 
@@ -347,12 +346,12 @@ class FieldLookupBackend(BaseFilterBackend):
             else:
                 args.append(Q(**{k:v}))
         for role_name in role_filters:
             if not hasattr(queryset.model, 'accessible_pk_qs'):
                 raise ParseError(_(
                     'Cannot apply role_level filter to this list because its model '
                     'does not use roles for access control.'))
             args.append(
-                Q(pk__in=RoleAncestorEntry.objects.filter(
-                    ancestor__in=request.user.roles.all(),
-                    content_type_id=ContentType.objects.get_for_model(queryset.model).id,
-                    role_field=role_name
-                ).values_list('object_id').distinct())
+                Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
             )
         if or_filters:
             q = Q()
@@ -364,12 +363,12 @@ class FieldLookupBackend(BaseFilterBackend):
             args.append(q)
         if search_filters and search_filter_relation == 'OR':
             q = Q()
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 for constrain in constrains:
                     q |= Q(**{constrain: term})
             args.append(q)
         elif search_filters and search_filter_relation == 'AND':
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 q_chain = Q()
                 for constrain in constrains:
                     q_chain |= Q(**{constrain: term})
```
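The OR and AND branches in the last hunk combine `Q` objects differently: OR folds every (constraint, term) pair into one disjunction, while AND builds one disjunction per search term and appends each group separately so the queryset conjoins them. A small illustration (assumes Django is installed; `Q` objects compose without a configured project):

```python
from django.db.models import Q

search_filters = {"web": ["name__icontains", "description__icontains"]}

# OR relation: one big disjunction across all constraint/term pairs.
q_or = Q()
for term, constraints in search_filters.items():
    for constraint in constraints:
        q_or |= Q(**{constraint: term})

# AND relation: one disjunction per term; appending each group separately
# lets a later filter(*groups) AND the groups together.
groups = []
for term, constraints in search_filters.items():
    q_chain = Q()
    for constraint in constraints:
        q_chain |= Q(**{constraint: term})
    groups.append(q_chain)

print(q_or)
print(groups)
```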
@@ -5,8 +5,7 @@
 import inspect
 import logging
 import time
-import six
-import urllib
+import urllib.parse

 # Django
 from django.conf import settings

@@ -32,14 +31,19 @@ from rest_framework.permissions import AllowAny
 from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

-# cryptography
-from cryptography.fernet import InvalidToken
-
 # AWX
 from awx.api.filters import FieldLookupBackend
-from awx.main.models import *  # noqa
+from awx.main.models import (
+    UnifiedJob, UnifiedJobTemplate, User, Role
+)
 from awx.main.access import access_registry
-from awx.main.utils import *  # noqa
+from awx.main.utils import (
+    camelcase_to_underscore,
+    get_search_fields,
+    getattrd,
+    get_object_or_400,
+    decrypt_field
+)
 from awx.main.utils.db import get_all_field_names
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
 from awx.api.versioning import URLPathVersioning, get_request_version

@@ -56,7 +60,7 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
            'ParentMixin',
            'DeleteLastUnattachLabelMixin',
            'SubListAttachDetachAPIView',
-           'CopyAPIView']
+           'CopyAPIView', 'BaseUsersList',]

 logger = logging.getLogger('awx.api.generics')
 analytics_logger = logging.getLogger('awx.analytics.performance')

@@ -90,12 +94,14 @@ class LoggedLoginView(auth_views.LoginView):
             logger.info(smart_text(u"User {} logged in.".format(self.request.user.username)))
             ret.set_cookie('userLoggedIn', 'true')
             current_user = UserSerializer(self.request.user)
-            current_user = JSONRenderer().render(current_user.data)
-            current_user = urllib.quote('%s' % current_user, '')
-            ret.set_cookie('current_user', current_user)
+            current_user = smart_text(JSONRenderer().render(current_user.data))
+            current_user = urllib.parse.quote('%s' % current_user, '')
+            ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)

             return ret
         else:
             if 'username' in self.request.POST:
                 logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'),request.META.get('REMOTE_ADDR', None))))
             ret.status_code = 401
             return ret
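Two py3 details drive the `LoggedLoginView` change: `urllib.quote` now lives at `urllib.parse.quote`, and `JSONRenderer().render()` returns `bytes`, which is why the result is passed through `smart_text` before quoting. A standalone sketch of the same steps (generic data, not the AWX view):

```python
import json
import urllib.parse

rendered = json.dumps({'username': 'admin'}).encode('utf-8')  # bytes, like JSONRenderer output
text = rendered.decode('utf-8')                               # the smart_text() step
cookie_value = urllib.parse.quote(text, '')                   # safe='' also escapes '/'
print(cookie_value)  # %7B%22username%22%3A%20%22admin%22%7D
```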
@@ -305,7 +311,7 @@ class APIView(views.APIView):
         # submitted data was rejected.
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method in ('POST', 'PUT', 'PATCH') and response_status in xrange(400, 500):
+        if request_method in ('POST', 'PUT', 'PATCH') and response_status in range(400, 500):
             return self.request.data.copy()

         return data

@@ -348,7 +354,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
         # form.
         if hasattr(self, '_raw_data_form_marker'):
             # Always remove read only fields from serializer.
-            for name, field in serializer.fields.items():
+            for name, field in list(serializer.fields.items()):
                 if getattr(field, 'read_only', None):
                     del serializer.fields[name]
             serializer._data = self.update_raw_data(serializer.data)
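Both hunks above fix py3-only failure modes: `xrange` is gone (py3's `range` is already lazy), and deleting keys from a dict while iterating its live view raises `RuntimeError`, hence the `list(...)` snapshot. Reduced to a sketch:

```python
fields = {'id': 1, 'secret': 2, 'name': 3}

# py3 would raise "dictionary changed size during iteration" here:
#   for name in fields: del fields[name]
for name in list(fields):  # snapshot the keys first
    if name == 'secret':
        del fields[name]

assert 404 in range(400, 500)  # the xrange(400, 500) replacement
print(fields)  # {'id': 1, 'name': 3}
```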
@@ -552,9 +558,8 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):

     def perform_list_destroy(self, instance_list):
         if self.check_sub_obj_permission:
+            # Check permissions for all before deleting, avoiding half-deleted lists
             for instance in instance_list:
-                if self.has_delete_permission(instance):
+                if not self.has_delete_permission(instance):
                     raise PermissionDenied()
         for instance in instance_list:
             self.perform_destroy(instance, check_permission=False)

@@ -749,7 +754,7 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
     def update_raw_data(self, data):
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method == 'POST' and response_status in xrange(400, 500):
+        if request_method == 'POST' and response_status in range(400, 500):
             return super(SubListAttachDetachAPIView, self).update_raw_data(data)
         return {'id': None}
@@ -855,15 +860,18 @@ class CopyAPIView(GenericAPIView):
             return field_val
         if isinstance(field_val, dict):
             for sub_field in field_val:
-                if isinstance(sub_field, six.string_types) \
-                        and isinstance(field_val[sub_field], six.string_types):
+                if isinstance(sub_field, str) \
+                        and isinstance(field_val[sub_field], str):
                     try:
                         field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
-                    except InvalidToken:
+                    except AttributeError:
                         # Catching the corner case with v1 credential fields
                         field_val[sub_field] = decrypt_field(obj, sub_field)
-        elif isinstance(field_val, six.string_types):
-            field_val = decrypt_field(obj, field_name)
+        elif isinstance(field_val, str):
+            try:
+                field_val = decrypt_field(obj, field_name)
+            except AttributeError:
+                return field_val
         return field_val

     def _build_create_dict(self, obj):

@@ -917,7 +925,7 @@ class CopyAPIView(GenericAPIView):
                     obj, field.name, field_val
                 )
         new_obj = model.objects.create(**create_kwargs)
-        logger.debug(six.text_type('Deep copy: Created new object {}({})').format(
+        logger.debug('Deep copy: Created new object {}({})'.format(
             new_obj, model
         ))
         # Need to save separately because django-crum get_current_user would
@@ -990,3 +998,22 @@ class CopyAPIView(GenericAPIView):
         serializer = self._get_copy_return_serializer(new_obj)
         headers = {'Location': new_obj.get_absolute_url(request=request)}
         return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
+
+
+class BaseUsersList(SubListCreateAttachDetachAPIView):
+    def post(self, request, *args, **kwargs):
+        ret = super(BaseUsersList, self).post( request, *args, **kwargs)
+        if ret.status_code != 201:
+            return ret
+        try:
+            if ret.data is not None and request.data.get('is_system_auditor', False):
+                # This is a faux-field that just maps to checking the system
+                # auditor role member list.. unfortunately this means we can't
+                # set it on creation, and thus needs to be set here.
+                user = User.objects.get(id=ret.data['id'])
+                user.is_system_auditor = request.data['is_system_auditor']
+                ret.data['is_system_auditor'] = request.data['is_system_auditor']
+        except AttributeError as exc:
+            print(exc)
+            pass
+        return ret
@@ -157,7 +157,7 @@ class Metadata(metadata.SimpleMetadata):
         finally:
             view.request = request

-        for field, meta in actions[method].items():
+        for field, meta in list(actions[method].items()):
             if not isinstance(meta, dict):
                 continue
@@ -234,17 +234,15 @@ class RoleMetadata(Metadata):

 # TODO: Tower 3.3 remove class and all uses in views.py when API v1 is removed
 class JobTypeMetadata(Metadata):
-    def get_field_info(self, field):
-        res = super(JobTypeMetadata, self).get_field_info(field)
-
-        if field.field_name == 'job_type':
-            index = 0
-            for choice in res['choices']:
-                if choice[0] == 'scan':
-                    res['choices'].pop(index)
-                    break
-                index += 1
-        return res
+    def get_field_info(self, field):
+        res = super(JobTypeMetadata, self).get_field_info(field)
+
+        if field.field_name == 'job_type':
+            res['choices'] = [
+                choice for choice in res['choices']
+                if choice[0] != 'scan'
+            ]
+        return res


 class SublistAttachDetatchMetadata(Metadata):
@@ -253,7 +251,7 @@ class SublistAttachDetatchMetadata(Metadata):
         actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
         method = 'POST'
         if method in actions:
-            for field in actions[method]:
+            for field in list(actions[method].keys()):
                 if field == 'id':
                     continue
                 actions[method].pop(field)
@@ -4,7 +4,7 @@ import json

 # Django
 from django.conf import settings
-from django.utils import six
+from django.utils.encoding import smart_str
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework

@@ -25,7 +25,7 @@ class JSONParser(parsers.JSONParser):
         encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

         try:
-            data = stream.read().decode(encoding)
+            data = smart_str(stream.read(), encoding=encoding)
             if not data:
                 return {}
             obj = json.loads(data, object_pairs_hook=OrderedDict)

@@ -33,4 +33,4 @@ class JSONParser(parsers.JSONParser):
                 raise ParseError(_('JSON parse error - not a JSON object'))
             return obj
         except ValueError as exc:
-            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % six.text_type(exc)))
+            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % str(exc)))
@@ -9,8 +9,8 @@ from rest_framework.exceptions import MethodNotAllowed, PermissionDenied
 from rest_framework import permissions

 # AWX
-from awx.main.access import *  # noqa
-from awx.main.models import *  # noqa
+from awx.main.access import check_user_access
+from awx.main.models import Inventory, UnifiedJob
 from awx.main.utils import get_object_or_400

 logger = logging.getLogger('awx.api.permissions')
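Replacing the `import *` lines with named imports is not only cosmetic: star imports hide where a name comes from and force `# noqa` markers to silence the linter. The difference in miniature, using a standard-library module rather than AWX's:

```python
# Before: "from os.path import *" pulls in join, exists, split, ... invisibly.
# After: the module's dependencies are spelled out and checkable by tools.
from os.path import join, exists

print(join('/tmp', 'demo'), exists('/tmp'))
```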
@@ -1,12 +1,12 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

+from django.utils.safestring import SafeText
+
 # Django REST Framework
 from rest_framework import renderers
 from rest_framework.request import override_method

-import six
-

 class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
     '''
@@ -20,6 +20,19 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
             return renderers.JSONRenderer()
         return renderer

+    def get_content(self, renderer, data, accepted_media_type, renderer_context):
+        if isinstance(data, SafeText):
+            # Older versions of Django (pre-2.0) have a py3 bug which causes
+            # bytestrings marked as "safe" to not actually get _treated_ as
+            # safe; this causes certain embedded strings (like the stdout HTML
+            # view) to be improperly escaped
+            # see: https://github.com/ansible/awx/issues/3108
+            #      https://code.djangoproject.com/ticket/28121
+            return data
+        return super(BrowsableAPIRenderer, self).get_content(renderer, data,
+                                                             accepted_media_type,
+                                                             renderer_context)
+
     def get_context(self, data, accepted_media_type, renderer_context):
         # Store the associated response status to know how to populate the raw
         # data form.
@@ -71,8 +84,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
     format = 'txt'

     def render(self, data, media_type=None, renderer_context=None):
-        if not isinstance(data, six.string_types):
-            data = six.text_type(data)
+        if not isinstance(data, str):
+            data = str(data)
         return data.encode(self.charset)
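`six.string_types` is `(basestring,)` on py2 but just `(str,)` on py3, and `six.text_type` is `str`, so once py2 support is dropped both collapse to plain `str`, as in this renderer. The one behavioral note: `bytes` no longer counts as a string. Sketch:

```python
data = 'stdout line'

if not isinstance(data, str):  # was: isinstance(data, six.string_types)
    data = str(data)           # was: six.text_type(data)

print(data.encode('utf-8'))    # bytes are intentionally *not* treated as str
```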
@@ -5,10 +5,8 @@
 import copy
 import json
 import logging
-import operator
 import re
-import six
-import urllib
+import urllib.parse
 from collections import OrderedDict
 from datetime import timedelta
@@ -40,17 +38,30 @@ from rest_framework.utils.serializer_helpers import ReturnList
 from polymorphic.models import PolymorphicModel

 # AWX
 from awx.main.access import get_user_capabilities
 from awx.main.constants import (
     SCHEDULEABLE_PROVIDERS,
     ANSI_SGR_PATTERN,
     ACTIVE_STATES,
     CENSOR_VALUE,
     CHOICES_PRIVILEGE_ESCALATION_METHODS,
 )
-from awx.main.models import *  # noqa
-from awx.main.models.base import NEW_JOB_TYPE_CHOICES
-from awx.main.fields import ImplicitRoleField
+from awx.main.models import (
+    ActivityStream, AdHocCommand, AdHocCommandEvent, Credential,
+    CredentialType, CustomInventoryScript, Fact, Group, Host, Instance,
+    InstanceGroup, Inventory, InventorySource, InventoryUpdate,
+    InventoryUpdateEvent, Job, JobEvent, JobHostSummary, JobLaunchConfig,
+    JobTemplate, Label, Notification, NotificationTemplate, OAuth2AccessToken,
+    OAuth2Application, Organization, Project, ProjectUpdate,
+    ProjectUpdateEvent, RefreshToken, Role, Schedule, SystemJob,
+    SystemJobEvent, SystemJobTemplate, Team, UnifiedJob, UnifiedJobTemplate,
+    UserSessionMembership, V1Credential, WorkflowJob, WorkflowJobNode,
+    WorkflowJobTemplate, WorkflowJobTemplateNode, StdoutMaxBytesExceeded
+)
+from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
+from awx.main.models.rbac import (
+    get_roles_on_resource, role_summary_fields_generator
+)
+from awx.main.fields import ImplicitRoleField, JSONBField
 from awx.main.utils import (
     get_type_for_model, get_model_for_type, timestamp_apiformat,
     camelcase_to_underscore, getattrd, parse_yaml_or_json,
@@ -61,7 +72,7 @@ from awx.main.redact import UriCleaner, REPLACE_STR

 from awx.main.validators import vars_validate_or_raise

-from awx.conf.license import feature_enabled
+from awx.conf.license import feature_enabled, LicenseForbids
 from awx.api.versioning import reverse, get_request_version
 from awx.api.fields import (BooleanNullField, CharNullField, ChoiceNullField,
                             VerbatimField, DeprecatedCredentialField)
@@ -104,7 +115,7 @@ SUMMARIZABLE_FK_FIELDS = {
     'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
     'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
     'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
-    'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'),
+    'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type'),
     'job_template': DEFAULT_SUMMARY_FIELDS,
     'workflow_job_template': DEFAULT_SUMMARY_FIELDS,
     'workflow_job': DEFAULT_SUMMARY_FIELDS,
@@ -203,11 +214,11 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):

     @staticmethod
     def _is_list_of_strings(x):
-        return isinstance(x, (list, tuple)) and all([isinstance(y, basestring) for y in x])
+        return isinstance(x, (list, tuple)) and all([isinstance(y, str) for y in x])

     @staticmethod
     def _is_extra_kwargs(x):
-        return isinstance(x, dict) and all([isinstance(k, basestring) and isinstance(v, dict) for k,v in x.items()])
+        return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k,v in x.items()])

     @classmethod
     def _update_meta(cls, base, meta, other=None):
@@ -259,9 +270,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
         return super(BaseSerializerMetaclass, cls).__new__(cls, name, bases, attrs)


-class BaseSerializer(serializers.ModelSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
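The `__metaclass__` attribute is silently ignored on Python 3, which means the old spelling stopped applying `BaseSerializerMetaclass` at all; the class-keyword form is the correct py3 replacement (later hunks retire the `six.add_metaclass` decorator the same way). A toy demonstration:

```python
class Tagging(type):
    def __new__(cls, name, bases, attrs):
        attrs['tagged'] = True
        return super(Tagging, cls).__new__(cls, name, bases, attrs)

class Old(object):
    __metaclass__ = Tagging   # applied on py2; a plain, ignored attribute on py3

class New(metaclass=Tagging):  # the py3 syntax used in the new code
    pass

print(getattr(Old, 'tagged', False), New.tagged)  # on py3: False True
```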
@@ -284,7 +293,7 @@ class BaseSerializer(serializers.ModelSerializer):
         # The following lines fix the problem of being able to pass JSON dict into PrimaryKeyRelatedField.
         data = kwargs.get('data', False)
         if data:
-            for field_name, field_instance in six.iteritems(self.fields):
+            for field_name, field_instance in self.fields.items():
                 if isinstance(field_instance, ManyRelatedField) and not field_instance.read_only:
                     if isinstance(data.get(field_name, False), dict):
                         raise serializers.ValidationError(_('Cannot use dictionary for %s' % field_name))

@@ -294,7 +303,7 @@ class BaseSerializer(serializers.ModelSerializer):
         """
         The request version component of the URL as an integer i.e., 1 or 2
         """
-        return get_request_version(self.context.get('request'))
+        return get_request_version(self.context.get('request')) or 1

     def get_type(self, obj):
         return get_type_for_model(self.Meta.model)
@@ -612,7 +621,7 @@ class BaseSerializer(serializers.ModelSerializer):
                     v2.extend(e)
                 else:
                     v2.append(e)
-            d[k] = map(force_text, v2)
+            d[k] = list(map(force_text, v2))
             raise ValidationError(d)
         return attrs
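`map()` (and `filter()`, fixed in later hunks) returns a one-shot iterator on Python 3, so any result that gets serialized or walked twice must be materialized with `list()`. Minimal reproduction:

```python
errors = ['too short', ValueError('bad value')]

lazy = map(str, errors)
assert list(lazy) == ['too short', 'bad value']
assert list(lazy) == []          # second pass over a py3 map is empty

safe = list(map(str, errors))    # materialize once, reuse freely
assert safe == [str(e) for e in errors]
```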
@@ -632,9 +641,7 @@ class EmptySerializer(serializers.Serializer):
     pass


-class BaseFactSerializer(BaseSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseFactSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     def get_fields(self):
         ret = super(BaseFactSerializer, self).get_fields()
@@ -886,7 +893,7 @@ class UserSerializer(BaseSerializer):
         model = User
         fields = ('*', '-name', '-description', '-modified',
                   'username', 'first_name', 'last_name',
-                  'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn', 'external_account')
+                  'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn', 'last_login', 'external_account')

     def to_representation(self, obj):  # TODO: Remove in 3.3
         ret = super(UserSerializer, self).to_representation(obj)

@@ -1050,7 +1057,7 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
         return ret

     def _is_valid_scope(self, value):
-        if not value or (not isinstance(value, six.string_types)):
+        if not value or (not isinstance(value, str)):
             return False
         words = value.split()
         for word in words:
@@ -1222,7 +1229,7 @@ class OrganizationSerializer(BaseSerializer):

     class Meta:
         model = Organization
-        fields = ('*', 'custom_virtualenv',)
+        fields = ('*', 'max_hosts', 'custom_virtualenv',)

     def get_related(self, obj):
         res = super(OrganizationSerializer, self).get_related(obj)

@@ -1258,6 +1265,20 @@ class OrganizationSerializer(BaseSerializer):
             summary_dict['related_field_counts'] = counts_dict[obj.id]
         return summary_dict

+    def validate(self, attrs):
+        obj = self.instance
+        view = self.context['view']
+
+        obj_limit = getattr(obj, 'max_hosts', None)
+        api_limit = attrs.get('max_hosts')
+
+        if not view.request.user.is_superuser:
+            if api_limit is not None and api_limit != obj_limit:
+                # Only allow superusers to edit the max_hosts field
+                raise serializers.ValidationError(_('Cannot change max_hosts.'))
+
+        return super(OrganizationSerializer, self).validate(attrs)
+

 class ProjectOptionsSerializer(BaseSerializer):
@@ -1311,16 +1332,12 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
         'admin', 'update',
         {'copy': 'organization.project_admin'}
     ]
-    scm_delete_on_next_update = serializers.BooleanField(
-        read_only=True,
-        help_text=_('This field has been deprecated and will be removed in a future release'))

     class Meta:
         model = Project
-        fields = ('*', 'organization', 'scm_delete_on_next_update', 'scm_update_on_launch',
+        fields = ('*', 'organization', 'scm_update_on_launch',
                   'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
                  ('last_update_failed', 'last_updated')  # Backwards compatibility
-        read_only_fields = ('scm_delete_on_next_update',)

     def get_related(self, obj):
         res = super(ProjectSerializer, self).get_related(obj)
@@ -1503,6 +1520,12 @@ class InventorySerializer(BaseSerializerWithVariables):
         'admin', 'adhoc',
         {'copy': 'organization.inventory_admin'}
     ]
+    groups_with_active_failures = serializers.IntegerField(
+        read_only=True,
+        min_value=0,
+        help_text=_('This field has been deprecated and will be removed in a future release')
+    )
+

     class Meta:
         model = Inventory
@@ -1547,6 +1570,18 @@ class InventorySerializer(BaseSerializerWithVariables):
     def validate_host_filter(self, host_filter):
         if host_filter:
             try:
+                for match in JSONBField.get_lookups().keys():
+                    if match == 'exact':
+                        # __exact is allowed
+                        continue
+                    match = '__{}'.format(match)
+                    if re.match(
+                        'ansible_facts[^=]+{}='.format(match),
+                        host_filter
+                    ):
+                        raise models.base.ValidationError({
+                            'host_filter': 'ansible_facts does not support searching with {}'.format(match)
+                        })
                 SmartFilter().query_from_string(host_filter)
             except RuntimeError as e:
                 raise models.base.ValidationError(e)
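The added loop rejects `ansible_facts` smart-filter lookups other than `__exact` with a regex before the filter string is ever parsed. The matching logic in isolation (a shortened lookup list stands in for `JSONBField.get_lookups()`):

```python
import re

def rejected_lookup(host_filter, lookups=('contains', 'icontains', 'has_key')):
    # mirrors the validation above: refuse any "ansible_facts...__<lookup>="
    for match in lookups:
        match = '__{}'.format(match)
        if re.match('ansible_facts[^=]+{}='.format(match), host_filter):
            return match
    return None

print(rejected_lookup('ansible_facts__distribution__contains="Ubuntu"'))  # __contains
print(rejected_lookup('ansible_facts__distribution__exact="Ubuntu"'))     # None
```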
@@ -1724,6 +1759,11 @@ class AnsibleFactsSerializer(BaseSerializer):

 class GroupSerializer(BaseSerializerWithVariables):
     capabilities_prefetch = ['inventory.admin', 'inventory.adhoc']
+    groups_with_active_failures = serializers.IntegerField(
+        read_only=True,
+        min_value=0,
+        help_text=_('This field has been deprecated and will be removed in a future release')
+    )

     class Meta:
         model = Group
@@ -2132,10 +2172,10 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
             return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)

         if get_field_from_model_or_attrs('source') != 'scm':
-            redundant_scm_fields = filter(
+            redundant_scm_fields = list(filter(
                 lambda x: attrs.get(x, None),
                 ['source_project', 'source_path', 'update_on_project_update']
-            )
+            ))
             if redundant_scm_fields:
                 raise serializers.ValidationError(
                     {"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))}
@@ -2172,10 +2212,12 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):

 class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

+    custom_virtualenv = serializers.ReadOnlyField()
+
     class Meta:
         model = InventoryUpdate
-        fields = ('*', 'inventory', 'inventory_source', 'license_error', 'source_project_update',
-                  '-controller_node',)
+        fields = ('*', 'inventory', 'inventory_source', 'license_error', 'org_host_limit_error',
+                  'source_project_update', 'custom_virtualenv', '-controller_node',)

     def get_related(self, obj):
         res = super(InventoryUpdateSerializer, self).get_related(obj)
@@ -2204,6 +2246,44 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
         return res


+class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):
+
+    source_project = serializers.SerializerMethodField(
+        help_text=_('The project used for this job.'),
+        method_name='get_source_project_id'
+    )
+
+    class Meta:
+        model = InventoryUpdate
+        fields = ('*', 'source_project',)
+
+    def get_source_project(self, obj):
+        return getattrd(obj, 'source_project_update.unified_job_template', None)
+
+    def get_source_project_id(self, obj):
+        return getattrd(obj, 'source_project_update.unified_job_template.id', None)
+
+    def get_related(self, obj):
+        res = super(InventoryUpdateDetailSerializer, self).get_related(obj)
+        source_project_id = self.get_source_project_id(obj)
+
+        if source_project_id:
+            res['source_project'] = self.reverse('api:project_detail', kwargs={'pk': source_project_id})
+        return res
+
+    def get_summary_fields(self, obj):
+        summary_fields = super(InventoryUpdateDetailSerializer, self).get_summary_fields(obj)
+        summary_obj = self.get_source_project(obj)
+
+        if summary_obj:
+            summary_fields['source_project'] = {}
+            for field in SUMMARIZABLE_FK_FIELDS['project']:
+                value = getattr(summary_obj, field, None)
+                if value is not None:
+                    summary_fields['source_project'][field] = value
+        return summary_fields
+
+
 class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSerializer):

     class Meta:
@@ -2456,25 +2536,21 @@ class CredentialTypeSerializer(BaseSerializer):
                 field['label'] = _(field['label'])
                 if 'help_text' in field:
                     field['help_text'] = _(field['help_text'])
-                if field['type'] == 'become_method':
-                    field.pop('type')
-                    field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
         return value

     def filter_field_metadata(self, fields, method):
         # API-created/modified CredentialType kinds are limited to
         # `cloud` and `net`
         if method in ('PUT', 'POST'):
-            fields['kind']['choices'] = filter(
+            fields['kind']['choices'] = list(filter(
                 lambda choice: choice[0] in ('cloud', 'net'),
                 fields['kind']['choices']
-            )
+            ))
         return fields


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1CredentialFields(BaseSerializer):
+class V1CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2492,8 +2568,7 @@ class V1CredentialFields(BaseSerializer):
         return super(V1CredentialFields, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class V2CredentialFields(BaseSerializer):
+class V2CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
@@ -2619,8 +2694,8 @@ class CredentialSerializer(BaseSerializer):
             raise serializers.ValidationError({"kind": _('"%s" is not a valid choice' % kind)})
         data['credential_type'] = credential_type.pk
         value = OrderedDict(
-            {'credential_type': credential_type}.items() +
-            super(CredentialSerializer, self).to_internal_value(data).items()
+            list({'credential_type': credential_type}.items()) +
+            list(super(CredentialSerializer, self).to_internal_value(data).items())
         )

         # Make a set of the keys in the POST/PUT payload
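On Python 3, `dict.items()` views cannot be joined with `+`, so both operands are wrapped in `list(...)` before the `OrderedDict` is built. Reproduced in miniature:

```python
from collections import OrderedDict

a = {'credential_type': 1}
b = {'name': 'demo'}

try:
    OrderedDict(a.items() + b.items())          # TypeError on py3: views don't support +
except TypeError as exc:
    print(exc)

print(OrderedDict(list(a.items()) + list(b.items())))  # the fix used above
```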
@@ -2781,8 +2856,7 @@ class LabelsListMixin(object):


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1JobOptionsSerializer(BaseSerializer):
+class V1JobOptionsSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2796,8 +2870,7 @@ class V1JobOptionsSerializer(BaseSerializer):
         return super(V1JobOptionsSerializer, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class LegacyCredentialFields(BaseSerializer):
+class LegacyCredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
@@ -2976,12 +3049,16 @@ class JobTemplateMixin(object):
     '''

     def _recent_jobs(self, obj):
-        if hasattr(obj, 'workflow_jobs'):
-            job_mgr = obj.workflow_jobs
-        else:
-            job_mgr = obj.jobs
-        return [{'id': x.id, 'status': x.status, 'finished': x.finished}
-                for x in job_mgr.all().order_by('-created')[:10]]
+        # Exclude "joblets", jobs that ran as part of a sliced workflow job
+        uj_qs = obj.unifiedjob_unified_jobs.exclude(job__job_slice_count__gt=1).order_by('-created')
+        # Would like to apply an .only, but does not play well with non_polymorphic
+        # .only('id', 'status', 'finished', 'polymorphic_ctype_id')
+        optimized_qs = uj_qs.non_polymorphic()
+        return [{
+            'id': x.id, 'status': x.status, 'finished': x.finished,
+            # Make type consistent with API top-level key, for instance workflow_job
+            'type': x.get_real_instance_class()._meta.verbose_name.replace(' ', '_')
+        } for x in optimized_qs[:10]]

     def get_summary_fields(self, obj):
         d = super(JobTemplateMixin, self).get_summary_fields(obj)
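The rewritten `_recent_jobs` reads one unified-jobs relation, excludes sliced-job "joblets" in SQL, and skips polymorphic subclass resolution for speed; the `type` key is then derived from each model's verbose name. That last step shown standalone (the literal string is a hypothetical stand-in for `x.get_real_instance_class()._meta.verbose_name`):

```python
verbose_name = 'workflow job'           # hypothetical _meta.verbose_name value
print(verbose_name.replace(' ', '_'))   # -> workflow_job, the API top-level key
```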
@@ -3050,7 +3127,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO

         prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
         if project is None:
-            raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
+            raise serializers.ValidationError({'project': _("Job Templates must have a project assigned.")})
         elif inventory is None and not get_field_from_model_or_attrs('ask_inventory_on_launch'):
             raise serializers.ValidationError({'inventory': prompting_error_message})
@@ -3059,6 +3136,13 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
     def validate_extra_vars(self, value):
         return vars_validate_or_raise(value)

+    def validate_job_slice_count(self, value):
+        if value > 1 and not feature_enabled('workflows'):
+            raise LicenseForbids({'job_slice_count': [_(
+                "Job slicing is a workflows-based feature and your license does not allow use of workflows."
+            )]})
+        return value
+
     def get_summary_fields(self, obj):
         summary_fields = super(JobTemplateSerializer, self).get_summary_fields(obj)
         all_creds = []
@@ -3103,19 +3187,46 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
         return summary_fields


+class JobTemplateWithSpecSerializer(JobTemplateSerializer):
+    '''
+    Used for activity stream entries.
+    '''
+
+    class Meta:
+        model = JobTemplate
+        fields = ('*', 'survey_spec')
+
+
 class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):

     passwords_needed_to_start = serializers.ReadOnlyField()
-    ask_diff_mode_on_launch = serializers.ReadOnlyField()
-    ask_variables_on_launch = serializers.ReadOnlyField()
-    ask_limit_on_launch = serializers.ReadOnlyField()
-    ask_skip_tags_on_launch = serializers.ReadOnlyField()
-    ask_tags_on_launch = serializers.ReadOnlyField()
-    ask_job_type_on_launch = serializers.ReadOnlyField()
-    ask_verbosity_on_launch = serializers.ReadOnlyField()
-    ask_inventory_on_launch = serializers.ReadOnlyField()
-    ask_credential_on_launch = serializers.ReadOnlyField()
+    ask_diff_mode_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_variables_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_limit_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_skip_tags_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_tags_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_job_type_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_verbosity_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_inventory_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
+    ask_credential_on_launch = serializers.BooleanField(
+        read_only=True,
+        help_text=_('This field has been deprecated and will be removed in a future release'))
     artifacts = serializers.SerializerMethodField()

     class Meta:
@@ -3252,10 +3363,11 @@ class JobDetailSerializer(JobSerializer):
     playbook_counts = serializers.SerializerMethodField(
         help_text=_('A count of all plays and tasks for the job run.'),
     )
+    custom_virtualenv = serializers.ReadOnlyField()

     class Meta:
         model = Job
-        fields = ('*', 'host_status_counts', 'playbook_counts',)
+        fields = ('*', 'host_status_counts', 'playbook_counts', 'custom_virtualenv')

     def get_playbook_counts(self, obj):
         task_count = obj.job_events.filter(event='playbook_on_task_start').count()
@@ -3442,12 +3554,16 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
             ret['name'] = obj.module_name
         return ret

+    def validate(self, attrs):
+        ret = super(AdHocCommandSerializer, self).validate(attrs)
+        return ret
+
     def validate_extra_vars(self, value):
         redacted_extra_vars, removed_vars = extract_ansible_vars(value)
         if removed_vars:
             raise serializers.ValidationError(_(
                 "{} are prohibited from use in ad hoc commands."
-            ).format(", ".join(removed_vars)))
+            ).format(", ".join(sorted(removed_vars, reverse=True))))
         return vars_validate_or_raise(value)
@@ -3558,7 +3674,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
     class Meta:
         model = WorkflowJobTemplate
         fields = ('*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
-                  'ask_variables_on_launch',)
+                  'ask_variables_on_launch', 'inventory', 'ask_inventory_on_launch',)

     def get_related(self, obj):
         res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
@@ -3586,13 +3702,24 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
         return vars_validate_or_raise(value)


+class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
+    '''
+    Used for activity stream entries.
+    '''
+
+    class Meta:
+        model = WorkflowJobTemplate
+        fields = ('*', 'survey_spec')
+
+
 class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):

     class Meta:
         model = WorkflowJob
         fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',
                   'job_template', 'is_sliced_job',
-                  '-execution_node', '-event_processing_finished', '-controller_node',)
+                  '-execution_node', '-event_processing_finished', '-controller_node',
+                  'inventory',)

     def get_related(self, obj):
         res = super(WorkflowJobSerializer, self).get_related(obj)
@@ -3664,7 +3791,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             for field in self.instance._meta.fields:
                 setattr(mock_obj, field.name, getattr(self.instance, field.name))
         field_names = set(field.name for field in self.Meta.model._meta.fields)
-        for field_name, value in attrs.items():
+        for field_name, value in list(attrs.items()):
             setattr(mock_obj, field_name, value)
             if field_name not in field_names:
                 attrs.pop(field_name)
@@ -3675,7 +3802,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
         if obj is None:
             return ret
         if 'extra_data' in ret and obj.survey_passwords:
-            ret['extra_data'] = obj.display_extra_data()
+            ret['extra_data'] = obj.display_extra_vars()
         return ret

     def get_summary_fields(self, obj):
@@ -3816,9 +3943,6 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
             ujt_obj = attrs['unified_job_template']
         elif self.instance:
             ujt_obj = self.instance.unified_job_template
-        if isinstance(ujt_obj, (WorkflowJobTemplate)):
-            raise serializers.ValidationError({
-                "unified_job_template": _("Cannot nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__})
         if 'credential' in deprecated_fields:  # TODO: remove when v2 API is deprecated
             cred = deprecated_fields['credential']
             attrs['credential'] = cred
@@ -3867,7 +3991,8 @@ class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
     class Meta:
         model = WorkflowJobNode
         fields = ('*', 'credential', 'job', 'workflow_job', '-name', '-description', 'id', 'url', 'related',
-                  'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',)
+                  'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
+                  'do_not_run',)

     def get_related(self, obj):
         res = super(WorkflowJobNodeSerializer, self).get_related(obj)
@@ -3896,7 +4021,7 @@ class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer)
     Influence the api browser sample data to not include workflow_job_template
     when editing a WorkflowNode.

-    Note: I was not able to accomplish this trough the use of extra_kwargs.
+    Note: I was not able to accomplish this through the use of extra_kwargs.
     Maybe something to do with workflow_job_template being a relational field?
     '''
     def build_relational_field(self, field_name, relation_info):
@@ -3927,7 +4052,8 @@ class JobHostSummarySerializer(BaseSerializer):
     class Meta:
         model = JobHostSummary
         fields = ('*', '-name', '-description', 'job', 'host', 'host_name', 'changed',
-                  'dark', 'failures', 'ok', 'processed', 'skipped', 'failed')
+                  'dark', 'failures', 'ok', 'processed', 'skipped', 'failed',
+                  'ignored', 'rescued')

     def get_related(self, obj):
         res = super(JobHostSummarySerializer, self).get_related(obj)
@@ -4280,7 +4406,7 @@ class JobLaunchSerializer(BaseSerializer):
                     passwords_needed=cred.passwords_needed
                 )
                 if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
-                    cred_dict['vault_id'] = cred.inputs.get('vault_id') or None
+                    cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
                 defaults_dict.setdefault(field_name, []).append(cred_dict)
             else:
                 defaults_dict[field_name] = getattr(obj, field_name)

@@ -4330,7 +4456,7 @@ class JobLaunchSerializer(BaseSerializer):
                 errors.setdefault('credentials', []).append(_(
                     'Removing {} credential at launch time without replacement is not supported. '
                     'Provided list lacked credential(s): {}.'
-                ).format(cred.unique_hash(display=True), ', '.join([six.text_type(c) for c in removed_creds])))
+                ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))

         # verify that credentials (either provided or existing) don't
         # require launch-time passwords that have not been provided
@@ -4369,37 +4495,63 @@ class JobLaunchSerializer(BaseSerializer):
 class WorkflowJobLaunchSerializer(BaseSerializer):

     can_start_without_user_input = serializers.BooleanField(read_only=True)
+    defaults = serializers.SerializerMethodField()
     variables_needed_to_start = serializers.ReadOnlyField()
     survey_enabled = serializers.SerializerMethodField()
     extra_vars = VerbatimField(required=False, write_only=True)
+    inventory = serializers.PrimaryKeyRelatedField(
+        queryset=Inventory.objects.all(),
+        required=False, write_only=True
+    )
     workflow_job_template_data = serializers.SerializerMethodField()

     class Meta:
         model = WorkflowJobTemplate
-        fields = ('can_start_without_user_input', 'extra_vars',
-                  'survey_enabled', 'variables_needed_to_start',
+        fields = ('ask_inventory_on_launch', 'can_start_without_user_input', 'defaults', 'extra_vars',
+                  'inventory', 'survey_enabled', 'variables_needed_to_start',
                   'node_templates_missing', 'node_prompts_rejected',
-                  'workflow_job_template_data')
+                  'workflow_job_template_data', 'survey_enabled', 'ask_variables_on_launch')
+        read_only_fields = ('ask_inventory_on_launch', 'ask_variables_on_launch')

     def get_survey_enabled(self, obj):
         if obj:
             return obj.survey_enabled and 'spec' in obj.survey_spec
         return False

+    def get_defaults(self, obj):
+        defaults_dict = {}
+        for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
+            if field_name == 'inventory':
+                defaults_dict[field_name] = dict(
+                    name=getattrd(obj, '%s.name' % field_name, None),
+                    id=getattrd(obj, '%s.pk' % field_name, None))
+            else:
+                defaults_dict[field_name] = getattr(obj, field_name)
+        return defaults_dict
+
     def get_workflow_job_template_data(self, obj):
         return dict(name=obj.name, id=obj.id, description=obj.description)

     def validate(self, attrs):
-        obj = self.instance
+        template = self.instance

-        accepted, rejected, errors = obj._accept_or_ignore_job_kwargs(
-            _exclude_errors=['required'],
-            **attrs)
+        accepted, rejected, errors = template._accept_or_ignore_job_kwargs(**attrs)
         self._ignored_fields = rejected

-        WFJT_extra_vars = obj.extra_vars
-        attrs = super(WorkflowJobLaunchSerializer, self).validate(attrs)
-        obj.extra_vars = WFJT_extra_vars
-        return attrs
+        if template.inventory and template.inventory.pending_deletion is True:
+            errors['inventory'] = _("The inventory associated with this Workflow is being deleted.")
+        elif 'inventory' in accepted and accepted['inventory'].pending_deletion:
+            errors['inventory'] = _("The provided inventory is being deleted.")
+
+        if errors:
+            raise serializers.ValidationError(errors)
+
+        WFJT_extra_vars = template.extra_vars
+        WFJT_inventory = template.inventory
+        super(WorkflowJobLaunchSerializer, self).validate(attrs)
+        template.extra_vars = WFJT_extra_vars
+        template.inventory = WFJT_inventory
+        return accepted


 class NotificationTemplateSerializer(BaseSerializer):
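A subtle point in the new `validate()`: DRF's `super().validate(attrs)` can write prompted values back onto `self.instance`, so the template's `extra_vars` and `inventory` are snapshotted before the call and restored afterwards. The idiom in isolation, with a hypothetical stand-in class:

```python
class Template:  # stand-in for the WorkflowJobTemplate instance
    extra_vars = '{"a": 1}'
    inventory = 'inv-1'

def clobbering_validate(template, attrs):
    template.extra_vars = attrs.get('extra_vars')  # models DRF's side effect

t = Template()
saved_vars, saved_inv = t.extra_vars, t.inventory  # snapshot, as the serializer does
clobbering_validate(t, {'extra_vars': '{}'})
t.extra_vars, t.inventory = saved_vars, saved_inv  # restore after super().validate()
assert t.extra_vars == '{"a": 1}'
```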
@@ -4410,11 +4562,11 @@ class NotificationTemplateSerializer(BaseSerializer):
         model = NotificationTemplate
         fields = ('*', 'organization', 'notification_type', 'notification_configuration')

-    type_map = {"string": (str, unicode),
+    type_map = {"string": (str,),
                 "int": (int,),
                 "bool": (bool,),
                 "list": (list,),
-                "password": (str, unicode),
+                "password": (str,),
                 "object": (dict, OrderedDict)}

     def to_representation(self, obj):
@@ -4466,12 +4618,15 @@ class NotificationTemplateSerializer(BaseSerializer):
             object_actual = self.context['view'].get_object()
         else:
             object_actual = None
-        for field in notification_class.init_parameters:
+        for field, params in notification_class.init_parameters.items():
             if field not in attrs['notification_configuration']:
-                missing_fields.append(field)
-                continue
+                if 'default' in params:
+                    attrs['notification_configuration'][field] = params['default']
+                else:
+                    missing_fields.append(field)
+                    continue
             field_val = attrs['notification_configuration'][field]
-            field_type = notification_class.init_parameters[field]['type']
+            field_type = params['type']
             expected_types = self.type_map[field_type]
             if not type(field_val) in expected_types:
                 incorrect_type_fields.append((field, field_type))
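Iterating `init_parameters.items()` hands over each parameter's spec dict, letting a missing configuration key fall back to a declared `default` instead of always being reported missing. The fill-in logic, sketched generically with hypothetical parameters:

```python
init_parameters = {
    'url':     {'type': 'string'},              # required: no default
    'timeout': {'type': 'int', 'default': 30},  # optional: has a default
}
config = {'url': 'https://example.invalid/hook'}

missing_fields = []
for field, params in init_parameters.items():
    if field not in config:
        if 'default' in params:
            config[field] = params['default']
        else:
            missing_fields.append(field)

print(config, missing_fields)  # timeout backfilled to 30; nothing missing
```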
@@ -4618,6 +4773,23 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
             res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request'))
         return res

+    def get_summary_fields(self, obj):
+        summary_fields = super(ScheduleSerializer, self).get_summary_fields(obj)
+        if 'inventory' in summary_fields:
+            return summary_fields
+
+        inventory = None
+        if obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None):
+            inventory = obj.unified_job_template.inventory
+        else:
+            return summary_fields
+
+        summary_fields['inventory'] = dict()
+        for field in SUMMARIZABLE_FK_FIELDS['inventory']:
+            summary_fields['inventory'][field] = getattr(inventory, field, None)
+
+        return summary_fields
+
     def validate_unified_job_template(self, value):
         if type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS:
             raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
@@ -4625,8 +4797,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
             raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
         elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
             raise serializers.ValidationError(_(
-                six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
-                              'Schedule its source project `{}` instead.').format(value.source_project.name)))
+                'Inventory sources with `update_on_project_update` cannot be scheduled. '
+                'Schedule its source project `{}` instead.'.format(value.source_project.name)))
         return value
@@ -4780,7 +4952,7 @@ class ActivityStreamSerializer(BaseSerializer):
         for key in summary_dict.keys():
             if 'id' not in summary_dict[key]:
                 summary_dict[key] = summary_dict[key] + ('id',)
-        field_list = summary_dict.items()
+        field_list = list(summary_dict.items())
         # Needed related fields that are not in the default summary fields
         field_list += [
             ('workflow_job_template_node', ('id', 'unified_job_template_id')),
@@ -4800,7 +4972,7 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_fields(self):
         ret = super(ActivityStreamSerializer, self).get_fields()
-        for key, field in ret.items():
+        for key, field in list(ret.items()):
             if key == 'changes':
                 field.help_text = _('A summary of the new and changed values when an object is created, updated, or deleted')
             if key == 'object1':
@@ -4841,10 +5013,6 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_related(self, obj):
         rel = {}
-        VIEW_NAME_EXCEPTIONS = {
-            'custom_inventory_script': 'inventory_script_detail',
-            'o_auth2_access_token': 'o_auth2_token_detail'
-        }
         if obj.actor is not None:
             rel['actor'] = self.reverse('api:user_detail', kwargs={'pk': obj.actor.pk})
         for fk, __ in self._local_summarizable_fk_fields:
@@ -4858,11 +5026,12 @@ class ActivityStreamSerializer(BaseSerializer):
             if getattr(thisItem, 'id', None) in id_list:
                 continue
             id_list.append(getattr(thisItem, 'id', None))
-            if fk in VIEW_NAME_EXCEPTIONS:
-                view_name = VIEW_NAME_EXCEPTIONS[fk]
+            if hasattr(thisItem, 'get_absolute_url'):
+                rel_url = thisItem.get_absolute_url(self.context.get('request'))
             else:
                 view_name = fk + '_detail'
-            rel[fk].append(self.reverse('api:' + view_name, kwargs={'pk': thisItem.id}))
+                rel_url = self.reverse('api:' + view_name, kwargs={'pk': thisItem.id})
+            rel[fk].append(rel_url)

             if fk == 'schedule':
                 rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
@@ -4904,6 +5073,17 @@ class ActivityStreamSerializer(BaseSerializer):
                         if fval is not None:
                             job_template_item[field] = fval
                     summary_fields['job_template'].append(job_template_item)
+                if fk == 'workflow_job_template_node':
+                    summary_fields['workflow_job_template'] = []
+                    workflow_job_template_item = {}
+                    workflow_job_template_fields = SUMMARIZABLE_FK_FIELDS['workflow_job_template']
+                    workflow_job_template = getattr(thisItem, 'workflow_job_template', None)
+                    if workflow_job_template is not None:
+                        for field in workflow_job_template_fields:
+                            fval = getattr(workflow_job_template, field, None)
+                            if fval is not None:
+                                workflow_job_template_item[field] = fval
+                        summary_fields['workflow_job_template'].append(workflow_job_template_item)
                 if fk == 'schedule':
                     unified_job_template = getattr(thisItem, 'unified_job_template', None)
                     if unified_job_template is not None:
@@ -4945,7 +5125,7 @@ class FactVersionSerializer(BaseFactSerializer):
         }
         res['fact_view'] = '%s?%s' % (
             reverse('api:host_fact_compare_view', kwargs={'pk': obj.host.pk}, request=self.context.get('request')),
-            urllib.urlencode(params)
+            urllib.parse.urlencode(params)
         )
         return res
@@ -4967,6 +5147,6 @@ class FactSerializer(BaseFactSerializer):
         ret = super(FactSerializer, self).to_representation(obj)
         if obj is None:
             return ret
-        if 'facts' in ret and isinstance(ret['facts'], six.string_types):
+        if 'facts' in ret and isinstance(ret['facts'], str):
             ret['facts'] = json.loads(ret['facts'])
         return ret
@@ -13,6 +13,17 @@ from rest_framework.views import APIView
 from rest_framework_swagger import renderers


+class SuperUserSchemaGenerator(SchemaGenerator):
+
+    def has_view_permissions(self, path, method, view):
+        #
+        # Generate the Swagger schema as if you were a superuser and
+        # permissions didn't matter; this short-circuits the schema path
+        # discovery to include _all_ potential paths in the API.
+        #
+        return True
+
+
 class AutoSchema(DRFAuthSchema):

     def get_link(self, path, method, base_url):
@@ -59,7 +70,7 @@ class SwaggerSchemaView(APIView):
     ]

     def get(self, request):
-        generator = SchemaGenerator(
+        generator = SuperUserSchemaGenerator(
             title='Ansible Tower API',
             patterns=None,
             urlconf=None
@@ -56,6 +56,7 @@ For example:

 ```bash
 curl -X POST \
+    -H "Content-Type: application/x-www-form-urlencoded" \
     -d "grant_type=password&username=<username>&password=<password>&scope=read" \
     -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \

@@ -85,6 +86,7 @@ format:
 The `/api/o/token/` endpoint is used for refreshing the access token:
 ```bash
 curl -X POST \
+    -H "Content-Type: application/x-www-form-urlencoded" \
     -d "grant_type=refresh_token&refresh_token=AL0NK9TTpv0qp54dGbC4VUZtsZ9r8z" \
     -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
     http://localhost:8013/api/o/token/ -i

@@ -114,6 +116,7 @@ Revoking is done by POSTing to `/api/o/revoke_token/` with the token to revoke a

 ```bash
 curl -X POST -d "token=rQONsve372fQwuc2pn76k3IHDCYpi7" \
+    -H "Content-Type: application/x-www-form-urlencoded" \
     -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
     http://localhost:8013/api/o/revoke_token/ -i
 ```
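The header added to each example matters because these endpoints only parse form-encoded bodies, and not every client sends that content type by default. The same password-grant request from Python, assuming the `requests` package and the placeholder credentials/host used in the docs:

```python
import requests  # third-party; client id/secret below are placeholders

resp = requests.post(
    'http://localhost:8013/api/o/token/',
    headers={'Content-Type': 'application/x-www-form-urlencoded'},
    data='grant_type=password&username=<username>&password=<password>&scope=read',
    auth=('<client_id>', '<client_secret>'),
)
print(resp.status_code, resp.json())
```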
File diff suppressed because it is too large

211	awx/api/views/inventory.py	(new file)
@@ -0,0 +1,211 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Python
|
||||
import logging
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# AWX
|
||||
from awx.main.models import (
|
||||
ActivityStream,
|
||||
Inventory,
|
||||
JobTemplate,
|
||||
Role,
|
||||
User,
|
||||
InstanceGroup,
|
||||
InventoryUpdateEvent,
|
||||
InventoryUpdate,
|
||||
InventorySource,
|
||||
CustomInventoryScript,
|
||||
)
|
||||
from awx.api.generics import (
|
||||
ListCreateAPIView,
|
||||
RetrieveUpdateDestroyAPIView,
|
||||
SubListAPIView,
|
||||
SubListAttachDetachAPIView,
|
||||
ResourceAccessList,
|
||||
CopyAPIView,
|
||||
)
|
||||
|
||||
from awx.api.serializers import (
|
||||
InventorySerializer,
|
||||
ActivityStreamSerializer,
|
||||
RoleSerializer,
|
||||
InstanceGroupSerializer,
|
||||
InventoryUpdateEventSerializer,
|
||||
CustomInventoryScriptSerializer,
|
||||
InventoryDetailSerializer,
|
||||
JobTemplateSerializer,
|
||||
)
|
||||
from awx.api.views.mixin import (
|
||||
ActivityStreamEnforcementMixin,
|
||||
RelatedJobsPreventDeleteMixin,
|
||||
ControlledByScmMixin,
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.api.views.organization')
|
||||
|
||||
|
||||
class InventoryUpdateEventsList(SubListAPIView):
|
||||
|
||||
model = InventoryUpdateEvent
|
||||
serializer_class = InventoryUpdateEventSerializer
|
||||
parent_model = InventoryUpdate
|
||||
relationship = 'inventory_update_events'
|
||||
view_name = _('Inventory Update Events List')
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
|
||||
return super(InventoryUpdateEventsList, self).finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
can_delete = request.user.can_access(self.model, 'delete', instance)
|
||||
if not can_delete:
|
||||
raise PermissionDenied(_("Cannot delete inventory script."))
|
||||
for inv_src in InventorySource.objects.filter(source_script=instance):
|
||||
inv_src.source_script = None
|
||||
inv_src.save()
|
||||
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = CustomInventoryScript
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class InventoryScriptCopy(CopyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
copy_return_serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryList(ListCreateAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
|
||||
def get_queryset(self):
|
||||
qs = Inventory.accessible_objects(self.request.user, 'read_role')
|
||||
qs = qs.select_related('admin_role', 'read_role', 'update_role', 'use_role', 'adhoc_role')
|
||||
qs = qs.prefetch_related('created_by', 'modified_by', 'organization')
|
||||
return qs
|
||||
|
||||
|
||||
class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):

    model = Inventory
    serializer_class = InventoryDetailSerializer

    def update(self, request, *args, **kwargs):
        obj = self.get_object()
        kind = self.request.data.get('kind') or kwargs.get('kind')

        # Do not allow changes to an Inventory kind.
        if kind is not None and obj.kind != kind:
            return self.http_method_not_allowed(request, *args, **kwargs)
        return super(InventoryDetail, self).update(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'delete', obj):
            raise PermissionDenied()
        self.check_related_active_jobs(obj)  # related jobs mixin
        try:
            obj.schedule_deletion(getattr(request.user, 'id', None))
            return Response(status=status.HTTP_202_ACCEPTED)
        except RuntimeError as e:
            return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)

class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer
    parent_model = Inventory
    relationship = 'activitystream_set'
    search_fields = ('changes',)

    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        qs = self.request.user.get_queryset(self.model)
        return qs.filter(Q(inventory=parent) | Q(host__in=parent.hosts.all()) | Q(group__in=parent.groups.all()))


class InventoryInstanceGroupsList(SubListAttachDetachAPIView):

    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Inventory
    relationship = 'instance_groups'


class InventoryAccessList(ResourceAccessList):

    model = User  # needs to be User for access lists
    parent_model = Inventory


class InventoryObjectRolesList(SubListAPIView):

    model = Role
    serializer_class = RoleSerializer
    parent_model = Inventory
    search_fields = ('role_field', 'content_type__model',)

    def get_queryset(self):
        po = self.get_parent_object()
        content_type = ContentType.objects.get_for_model(self.parent_model)
        return Role.objects.filter(content_type=content_type, object_id=po.pk)


class InventoryJobTemplateList(SubListAPIView):

    model = JobTemplate
    serializer_class = JobTemplateSerializer
    parent_model = Inventory
    relationship = 'jobtemplates'

    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        qs = self.request.user.get_queryset(self.model)
        return qs.filter(inventory=parent)


class InventoryCopy(CopyAPIView):

    model = Inventory
    copy_return_serializer_class = InventorySerializer

@@ -13,12 +13,16 @@ from django.shortcuts import get_object_or_404
 from django.utils.timezone import now
 from django.utils.translation import ugettext_lazy as _

+from rest_framework.permissions import SAFE_METHODS
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.response import Response
 from rest_framework import status

 from awx.main.constants import ACTIVE_STATES
-from awx.main.utils import get_object_or_400
+from awx.main.utils import (
+    get_object_or_400,
+    parse_yaml_or_json,
+)
 from awx.main.models.ha import (
     Instance,
     InstanceGroup,
@@ -273,3 +277,33 @@ class OrganizationCountsMixin(object):
         full_context['related_field_counts'] = count_context

         return full_context
+
+
+class ControlledByScmMixin(object):
+    '''
+    Special mixin to reset the SCM inventory commit hash
+    if anything that it manages changes.
+    '''
+
+    def _reset_inv_src_rev(self, obj):
+        if self.request.method in SAFE_METHODS or not obj:
+            return
+        project_following_sources = obj.inventory_sources.filter(
+            update_on_project_update=True, source='scm')
+        if project_following_sources:
+            # Allow inventory changes unrelated to variables
+            if self.model == Inventory and (
+                    not self.request or not self.request.data or
+                    parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
+                return
+            project_following_sources.update(scm_last_revision='')
+
+    def get_object(self):
+        obj = super(ControlledByScmMixin, self).get_object()
+        self._reset_inv_src_rev(obj)
+        return obj
+
+    def get_parent_object(self):
+        obj = super(ControlledByScmMixin, self).get_parent_object()
+        self._reset_inv_src_rev(obj)
+        return obj
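# A minimal usage sketch (hypothetical view, not part of this changeset): any
# DRF view that lists ControlledByScmMixin ahead of its base class gets the
# revision reset implicitly, because get_object()/get_parent_object() run on
# every non-safe request before the change is applied.
#
#   class InventoryHostsList(ControlledByScmMixin, SubListCreateAPIView):
#       model = Host
#       parent_model = Inventory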
awx/api/views/organization.py  (new file, 247 lines)
@@ -0,0 +1,247 @@
# Copyright (c) 2018 Red Hat, Inc.
# All Rights Reserved.

# Python
import logging

# Django
from django.db.models import Count
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.conf.license import (
    feature_enabled,
    LicenseForbids,
)
from awx.main.models import (
    ActivityStream,
    Inventory,
    Project,
    JobTemplate,
    WorkflowJobTemplate,
    Organization,
    NotificationTemplate,
    Role,
    User,
    Team,
    InstanceGroup,
)
from awx.api.generics import (
    ListCreateAPIView,
    RetrieveUpdateDestroyAPIView,
    SubListAPIView,
    SubListCreateAttachDetachAPIView,
    SubListAttachDetachAPIView,
    ResourceAccessList,
    BaseUsersList,
)

from awx.api.serializers import (
    OrganizationSerializer,
    InventorySerializer,
    ProjectSerializer,
    UserSerializer,
    TeamSerializer,
    ActivityStreamSerializer,
    RoleSerializer,
    NotificationTemplateSerializer,
    WorkflowJobTemplateSerializer,
    InstanceGroupSerializer,
)
from awx.api.views.mixin import (
    ActivityStreamEnforcementMixin,
    RelatedJobsPreventDeleteMixin,
    OrganizationCountsMixin,
)

logger = logging.getLogger('awx.api.views.organization')

class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):

    model = Organization
    serializer_class = OrganizationSerializer

    def get_queryset(self):
        qs = Organization.accessible_objects(self.request.user, 'read_role')
        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
        qs = qs.prefetch_related('created_by', 'modified_by')
        return qs

    def create(self, request, *args, **kwargs):
        """Create a new organization.

        If there is already an organization and the license of this
        instance does not permit multiple organizations, then raise
        LicenseForbids.
        """
        # Sanity check: If the multiple organizations feature is disallowed
        # by the license, then we are only willing to create this organization
        # if no organizations exist in the system.
        if (not feature_enabled('multiple_organizations') and
                self.model.objects.exists()):
            raise LicenseForbids(_('Your license only permits a single '
                                   'organization to exist.'))

        # Okay, create the organization as usual.
        return super(OrganizationList, self).create(request, *args, **kwargs)

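# Sketch of the gating above (illustrative; the exact status code depends on
# how LicenseForbids is rendered by the API exception handler):
#
#   POST /api/v2/organizations/ {"name": "Default"}  -> 201 (first organization)
#   POST /api/v2/organizations/ {"name": "Second"}   -> error (LicenseForbids)
#       when the license lacks the 'multiple_organizations' feature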
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):

    model = Organization
    serializer_class = OrganizationSerializer

    def get_serializer_context(self, *args, **kwargs):
        full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs)

        if not hasattr(self, 'kwargs') or 'pk' not in self.kwargs:
            return full_context
        org_id = int(self.kwargs['pk'])

        org_counts = {}
        access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
        direct_counts = Organization.objects.filter(id=org_id).annotate(
            users=Count('member_role__members', distinct=True),
            admins=Count('admin_role__members', distinct=True)
        ).values('users', 'admins')

        if not direct_counts:
            return full_context

        org_counts = direct_counts[0]
        org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
            project__organization__id=org_id).count()

        full_context['related_field_counts'] = {}
        full_context['related_field_counts'][org_id] = org_counts

        return full_context

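# Illustrative shape (hypothetical counts) of the context key assembled above,
# keyed by the organization's primary key:
#
#   full_context['related_field_counts'] == {
#       42: {'users': 10, 'admins': 2, 'inventories': 3,
#            'teams': 1, 'projects': 4, 'job_templates': 7},
#   }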
class OrganizationInventoriesList(SubListAPIView):

    model = Inventory
    serializer_class = InventorySerializer
    parent_model = Organization
    relationship = 'inventories'


class OrganizationUsersList(BaseUsersList):

    model = User
    serializer_class = UserSerializer
    parent_model = Organization
    relationship = 'member_role.members'


class OrganizationAdminsList(BaseUsersList):

    model = User
    serializer_class = UserSerializer
    parent_model = Organization
    relationship = 'admin_role.members'


class OrganizationProjectsList(SubListCreateAttachDetachAPIView):

    model = Project
    serializer_class = ProjectSerializer
    parent_model = Organization
    relationship = 'projects'
    parent_key = 'organization'


class OrganizationWorkflowJobTemplatesList(SubListCreateAttachDetachAPIView):

    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateSerializer
    parent_model = Organization
    relationship = 'workflows'
    parent_key = 'organization'


class OrganizationTeamsList(SubListCreateAttachDetachAPIView):

    model = Team
    serializer_class = TeamSerializer
    parent_model = Organization
    relationship = 'teams'
    parent_key = 'organization'


class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer
    parent_model = Organization
    relationship = 'activitystream_set'
    search_fields = ('changes',)


class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates'
    parent_key = 'organization'


class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_any'


class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_error'


class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_success'


class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):

    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Organization
    relationship = 'instance_groups'


class OrganizationAccessList(ResourceAccessList):

    model = User  # needs to be User for access lists
    parent_model = Organization


class OrganizationObjectRolesList(SubListAPIView):

    model = Role
    serializer_class = RoleSerializer
    parent_model = Organization
    search_fields = ('role_field', 'content_type__model',)

    def get_queryset(self):
        po = self.get_parent_object()
        content_type = ContentType.objects.get_for_model(self.parent_model)
        return Role.objects.filter(content_type=content_type, object_id=po.pk)

awx/api/views/root.py  (new file, 281 lines)
@@ -0,0 +1,281 @@
# Copyright (c) 2018 Ansible, Inc.
# All Rights Reserved.

import logging
import operator
import json
from collections import OrderedDict

from django.conf import settings
from django.utils.encoding import smart_text
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework import status

from awx.api.generics import APIView
from awx.main.ha import is_ha_environment
from awx.main.utils import (
    get_awx_version,
    get_ansible_version,
    get_custom_venv_choices,
    to_python_boolean,
)
from awx.api.versioning import reverse, get_request_version, drf_reverse
from awx.conf.license import get_license, feature_enabled
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import (
    Project,
    Organization,
    Instance,
    InstanceGroup,
    JobTemplate,
)

logger = logging.getLogger('awx.api.views.root')

class ApiRootView(APIView):

    permission_classes = (AllowAny,)
    view_name = _('REST API')
    versioning_class = None
    swagger_topic = 'Versioning'

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, format=None):
        ''' List supported API versions '''

        v1 = reverse('api:api_v1_root_view', kwargs={'version': 'v1'})
        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
        data = OrderedDict()
        data['description'] = _('AWX REST API')
        data['current_version'] = v2
        data['available_versions'] = dict(v1=v1, v2=v2)
        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
        if feature_enabled('rebranding'):
            data['custom_logo'] = settings.CUSTOM_LOGO
            data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        return Response(data)

class ApiOAuthAuthorizationRootView(APIView):

    permission_classes = (AllowAny,)
    view_name = _("API OAuth 2 Authorization Root")
    versioning_class = None
    swagger_topic = 'Authentication'

    def get(self, request, format=None):
        data = OrderedDict()
        data['authorize'] = drf_reverse('api:authorize')
        data['token'] = drf_reverse('api:token')
        data['revoke_token'] = drf_reverse('api:revoke-token')
        return Response(data)

class ApiVersionRootView(APIView):

    permission_classes = (AllowAny,)
    swagger_topic = 'Versioning'

    def get(self, request, format=None):
        ''' List top level resources '''
        data = OrderedDict()
        data['ping'] = reverse('api:api_v1_ping_view', request=request)
        data['instances'] = reverse('api:instance_list', request=request)
        data['instance_groups'] = reverse('api:instance_group_list', request=request)
        data['config'] = reverse('api:api_v1_config_view', request=request)
        data['settings'] = reverse('api:setting_category_list', request=request)
        data['me'] = reverse('api:user_me_list', request=request)
        data['dashboard'] = reverse('api:dashboard_view', request=request)
        data['organizations'] = reverse('api:organization_list', request=request)
        data['users'] = reverse('api:user_list', request=request)
        data['projects'] = reverse('api:project_list', request=request)
        data['project_updates'] = reverse('api:project_update_list', request=request)
        data['teams'] = reverse('api:team_list', request=request)
        data['credentials'] = reverse('api:credential_list', request=request)
        if get_request_version(request) > 1:
            data['credential_types'] = reverse('api:credential_type_list', request=request)
            data['applications'] = reverse('api:o_auth2_application_list', request=request)
            data['tokens'] = reverse('api:o_auth2_token_list', request=request)
        data['inventory'] = reverse('api:inventory_list', request=request)
        data['inventory_scripts'] = reverse('api:inventory_script_list', request=request)
        data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
        data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
        data['groups'] = reverse('api:group_list', request=request)
        data['hosts'] = reverse('api:host_list', request=request)
        data['job_templates'] = reverse('api:job_template_list', request=request)
        data['jobs'] = reverse('api:job_list', request=request)
        data['job_events'] = reverse('api:job_event_list', request=request)
        data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
        data['system_job_templates'] = reverse('api:system_job_template_list', request=request)
        data['system_jobs'] = reverse('api:system_job_list', request=request)
        data['schedules'] = reverse('api:schedule_list', request=request)
        data['roles'] = reverse('api:role_list', request=request)
        data['notification_templates'] = reverse('api:notification_template_list', request=request)
        data['notifications'] = reverse('api:notification_list', request=request)
        data['labels'] = reverse('api:label_list', request=request)
        data['unified_job_templates'] = reverse('api:unified_job_template_list', request=request)
        data['unified_jobs'] = reverse('api:unified_job_list', request=request)
        data['activity_stream'] = reverse('api:activity_stream_list', request=request)
        data['workflow_job_templates'] = reverse('api:workflow_job_template_list', request=request)
        data['workflow_jobs'] = reverse('api:workflow_job_list', request=request)
        data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
        data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
        return Response(data)


class ApiV1RootView(ApiVersionRootView):
    view_name = _('Version 1')


class ApiV2RootView(ApiVersionRootView):
    view_name = _('Version 2')

class ApiV1PingView(APIView):
    """A simple view that reports very basic information about this
    instance, which is acceptable to be public information.
    """
    permission_classes = (AllowAny,)
    authentication_classes = ()
    view_name = _('Ping')
    swagger_topic = 'System Configuration'

    def get(self, request, format=None):
        """Return some basic information about this instance.

        Everything returned here should be considered public / insecure, as
        this requires no auth and is intended for use by the installer process.
        """
        response = {
            'ha': is_ha_environment(),
            'version': get_awx_version(),
            'active_node': settings.CLUSTER_HOST_ID,
        }

        response['instances'] = []
        for instance in Instance.objects.all():
            response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
                                              capacity=instance.capacity, version=instance.version))
        # sorted() returns a new list, so assign the result back.
        response['instances'] = sorted(response['instances'], key=operator.itemgetter('node'))
        response['instance_groups'] = []
        for instance_group in InstanceGroup.objects.all():
            response['instance_groups'].append(dict(name=instance_group.name,
                                                    capacity=instance_group.capacity,
                                                    instances=[x.hostname for x in instance_group.instances.all()]))
        return Response(response)

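# Example of hitting the ping endpoint (hypothetical hostname; no credentials
# are needed since the view allows anonymous access and skips authentication):
#
#   import requests
#   r = requests.get('https://awx.example.com/api/v1/ping/')
#   r.json()   # {'ha': False, 'version': '...', 'active_node': '...',
#              #  'instances': [...], 'instance_groups': [...]}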
class ApiV1ConfigView(APIView):

    permission_classes = (IsAuthenticated,)
    view_name = _('Configuration')
    swagger_topic = 'System Configuration'

    def check_permissions(self, request):
        super(ApiV1ConfigView, self).check_permissions(request)
        if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
            self.permission_denied(request)  # Raises PermissionDenied exception.

    def get(self, request, format=None):
        '''Return various sitewide configuration settings'''

        if request.user.is_superuser or request.user.is_system_auditor:
            license_data = get_license(show_key=True)
        else:
            license_data = get_license(show_key=False)
        if not license_data.get('valid_key', False):
            license_data = {}
        if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
            # FIXME: Make the final setting value dependent on the feature?
            license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED

        pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'

        data = dict(
            time_zone=settings.TIME_ZONE,
            license_info=license_data,
            version=get_awx_version(),
            ansible_version=get_ansible_version(),
            eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
            analytics_status=pendo_state,
            become_methods=PRIVILEGE_ESCALATION_METHODS,
        )

        # If LDAP is enabled, user_ldap_fields will return a list of field
        # names that are managed by LDAP and should be read-only for users with
        # a non-empty ldap_dn attribute.
        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
            user_ldap_fields = ['username', 'password']
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
            data['user_ldap_fields'] = user_ldap_fields

        if request.user.is_superuser \
                or request.user.is_system_auditor \
                or Organization.accessible_objects(request.user, 'admin_role').exists() \
                or Organization.accessible_objects(request.user, 'auditor_role').exists():
            data.update(dict(
                project_base_dir=settings.PROJECTS_ROOT,
                project_local_paths=Project.get_local_path_choices(),
                custom_virtualenvs=get_custom_venv_choices(),
            ))
        elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
            data['custom_virtualenvs'] = get_custom_venv_choices()

        return Response(data)

    def post(self, request):
        if not isinstance(request.data, dict):
            return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
        if "eula_accepted" not in request.data:
            return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
        try:
            eula_accepted = to_python_boolean(request.data["eula_accepted"])
        except ValueError:
            return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)

        if not eula_accepted:
            return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
        request.data.pop("eula_accepted")
        try:
            data_actual = json.dumps(request.data)
        except Exception:
            logger.info(smart_text(u"Invalid JSON submitted for license."),
                        extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
        try:
            from awx.main.utils.common import get_licenser
            license_data = json.loads(data_actual)
            license_data_validated = get_licenser(**license_data).validate()
        except Exception:
            logger.warning(smart_text(u"Invalid license submitted."),
                           extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)

        # If the license is valid, write it to the database.
        if license_data_validated['valid_key']:
            settings.LICENSE = license_data
            settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
            return Response(license_data_validated)

        logger.warning(smart_text(u"Invalid license submitted."),
                       extra=dict(actor=request.user.username))
        return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request):
        try:
            settings.LICENSE = {}
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # FIX: Log
            return Response({"error": _("Failed to remove license.")}, status=status.HTTP_400_BAD_REQUEST)

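# Illustrative request flow for this view (the keys inside the license blob
# besides 'eula_accepted' are assumptions; validation is delegated entirely
# to get_licenser(...).validate()):
#
#   POST /api/v1/config/ {"eula_accepted": true, "license_key": "...", ...}
#     -> 200 with the validated license data when valid_key is True
#     -> 400 {"error": ...} for malformed JSON, a missing/false EULA flag,
#        or a license that fails validation
#   DELETE /api/v1/config/  -> 204 and clears settings.LICENSE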
@@ -1,6 +1,7 @@
 # Python
 import os
 import logging
-import urlparse
+import urllib.parse as urlparse
 from collections import OrderedDict

 # Django
@@ -8,9 +9,10 @@ from django.core.validators import URLValidator
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework
-from rest_framework.fields import *  # noqa
-
-import six
+from rest_framework.fields import (  # noqa
+    BooleanField, CharField, ChoiceField, DictField, EmailField, IntegerField,
+    ListField, NullBooleanField
+)

 logger = logging.getLogger('awx.conf.fields')
@@ -71,7 +73,7 @@ class StringListBooleanField(ListField):
                 return False
             elif value in NullBooleanField.NULL_VALUES:
                 return None
-            elif isinstance(value, basestring):
+            elif isinstance(value, str):
                 return self.child.to_representation(value)
         except TypeError:
             pass
@@ -88,13 +90,33 @@ class StringListBooleanField(ListField):
                 return False
             elif data in NullBooleanField.NULL_VALUES:
                 return None
-            elif isinstance(data, basestring):
+            elif isinstance(data, str):
                 return self.child.run_validation(data)
         except TypeError:
             pass
         self.fail('type_error', input_type=type(data))
+
+
+class StringListPathField(StringListField):
+
+    default_error_messages = {
+        'type_error': _('Expected list of strings but got {input_type} instead.'),
+        'path_error': _('{path} is not a valid path choice.'),
+    }
+
+    def to_internal_value(self, paths):
+        if isinstance(paths, (list, tuple)):
+            for p in paths:
+                if not isinstance(p, str):
+                    self.fail('type_error', input_type=type(p))
+                if not os.path.exists(p):
+                    self.fail('path_error', path=p)
+
+            return super(StringListPathField, self).to_internal_value(sorted({os.path.normpath(path) for path in paths}))
+        else:
+            self.fail('type_error', input_type=type(paths))
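# Behavior sketch for the field above (assumes '/home' exists on the local
# filesystem; nonexistent paths trigger the 'path_error' message):
#
#   field = StringListPathField()
#   field.to_internal_value(['/home/', '/home/./'])  # -> ['/home']
#   field.to_internal_value(['/no/such/dir'])        # ValidationError (path_error)
#   field.to_internal_value('/home')                 # ValidationError (type_error)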

 class URLField(CharField):

     def __init__(self, **kwargs):
@@ -139,7 +161,7 @@ class KeyValueField(DictField):
     def to_internal_value(self, data):
         ret = super(KeyValueField, self).to_internal_value(data)
         for value in data.values():
-            if not isinstance(value, six.string_types + six.integer_types + (float,)):
+            if not isinstance(value, (str, int, float)):
                 if isinstance(value, OrderedDict):
                     value = dict(value)
                 self.fail('invalid_child', input=value)
@@ -1,480 +0,0 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

# Python
import base64
import collections
import difflib
import json
import os
import shutil

# Django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

# Tower
from awx import MODE
from awx.conf import settings_registry
from awx.conf.fields import empty, SkipField
from awx.conf.models import Setting
from awx.conf.utils import comment_assignments


class Command(BaseCommand):

    def add_arguments(self, parser):
        parser.add_argument(
            'category',
            nargs='*',
            type=str,
        )
        parser.add_argument(
            '--dry-run',
            action='store_true',
            dest='dry_run',
            default=False,
            help=_('Only show which settings would be commented/migrated.'),
        )
        parser.add_argument(
            '--skip-errors',
            action='store_true',
            dest='skip_errors',
            default=False,
            help=_('Skip over settings that would raise an error when commenting/migrating.'),
        )
        parser.add_argument(
            '--no-comment',
            action='store_true',
            dest='no_comment',
            default=False,
            help=_('Skip commenting out settings in files.'),
        )
        parser.add_argument(
            '--comment-only',
            action='store_true',
            dest='comment_only',
            default=False,
            help=_('Skip migrating and only comment out settings in files.'),
        )
        parser.add_argument(
            '--backup-suffix',
            dest='backup_suffix',
            default=now().strftime('.%Y%m%d%H%M%S'),
            help=_('Backup existing settings files with this suffix.'),
        )
    @transaction.atomic
    def handle(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))
        self.dry_run = bool(options.get('dry_run', False))
        self.skip_errors = bool(options.get('skip_errors', False))
        self.no_comment = bool(options.get('no_comment', False))
        self.comment_only = bool(options.get('comment_only', False))
        self.backup_suffix = options.get('backup_suffix', '')
        self.categories = options.get('category', None) or ['all']
        self.style.HEADING = self.style.MIGRATE_HEADING
        self.style.LABEL = self.style.MIGRATE_LABEL
        self.style.OK = self.style.SQL_FIELD
        self.style.SKIP = self.style.WARNING
        self.style.VALUE = self.style.SQL_KEYWORD

        # Determine if any categories provided are invalid.
        category_slugs = []
        invalid_categories = []
        for category in self.categories:
            category_slug = slugify(category)
            if category_slug in settings_registry.get_registered_categories():
                if category_slug not in category_slugs:
                    category_slugs.append(category_slug)
            else:
                if category not in invalid_categories:
                    invalid_categories.append(category)
        if len(invalid_categories) == 1:
            raise CommandError('Invalid setting category: {}'.format(invalid_categories[0]))
        elif len(invalid_categories) > 1:
            raise CommandError('Invalid setting categories: {}'.format(', '.join(invalid_categories)))

        # Build a list of all settings to be migrated.
        registered_settings = []
        for category_slug in category_slugs:
            for registered_setting in settings_registry.get_registered_settings(category_slug=category_slug, read_only=False):
                if registered_setting not in registered_settings:
                    registered_settings.append(registered_setting)

        self._migrate_settings(registered_settings)
    def _get_settings_file_patterns(self):
        if MODE == 'development':
            return [
                '/etc/tower/settings.py',
                '/etc/tower/conf.d/*.py',
                os.path.join(os.path.dirname(__file__), '..', '..', '..', 'settings', 'local_*.py')
            ]
        else:
            return [
                os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py'),
                os.path.join(os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/'), '*.py'),
            ]

    def _get_license_file(self):
        return os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license')

    def _comment_license_file(self, dry_run=True):
        license_file = self._get_license_file()
        diff_lines = []
        if os.path.exists(license_file):
            try:
                raw_license_data = open(license_file).read()
                json.loads(raw_license_data)
            except Exception as e:
                raise CommandError('Error reading license from {0}: {1!r}'.format(license_file, e))
            if self.backup_suffix:
                backup_license_file = '{}{}'.format(license_file, self.backup_suffix)
            else:
                backup_license_file = '{}.old'.format(license_file)
            diff_lines = list(difflib.unified_diff(
                raw_license_data.splitlines(),
                [],
                fromfile=backup_license_file,
                tofile=license_file,
                lineterm='',
            ))
            if not dry_run:
                if self.backup_suffix:
                    shutil.copy2(license_file, backup_license_file)
                os.remove(license_file)
        return diff_lines

    def _get_local_settings_file(self):
        if MODE == 'development':
            static_root = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'ui', 'static')
        else:
            static_root = settings.STATIC_ROOT
        return os.path.join(static_root, 'local_settings.json')

    def _comment_local_settings_file(self, dry_run=True):
        local_settings_file = self._get_local_settings_file()
        diff_lines = []
        if os.path.exists(local_settings_file):
            try:
                raw_local_settings_data = open(local_settings_file).read()
                json.loads(raw_local_settings_data)
            except Exception as e:
                if not self.skip_errors:
                    raise CommandError('Error reading local settings from {0}: {1!r}'.format(local_settings_file, e))
                return diff_lines
            if self.backup_suffix:
                backup_local_settings_file = '{}{}'.format(local_settings_file, self.backup_suffix)
            else:
                backup_local_settings_file = '{}.old'.format(local_settings_file)
            diff_lines = list(difflib.unified_diff(
                raw_local_settings_data.splitlines(),
                [],
                fromfile=backup_local_settings_file,
                tofile=local_settings_file,
                lineterm='',
            ))
            if not dry_run:
                if self.backup_suffix:
                    shutil.copy2(local_settings_file, backup_local_settings_file)
                os.remove(local_settings_file)
        return diff_lines

    def _get_custom_logo_file(self):
        if MODE == 'development':
            static_root = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'ui', 'static')
        else:
            static_root = settings.STATIC_ROOT
        return os.path.join(static_root, 'assets', 'custom_console_logo.png')

    def _comment_custom_logo_file(self, dry_run=True):
        custom_logo_file = self._get_custom_logo_file()
        diff_lines = []
        if os.path.exists(custom_logo_file):
            try:
                raw_custom_logo_data = open(custom_logo_file).read()
            except Exception as e:
                if not self.skip_errors:
                    raise CommandError('Error reading custom logo from {0}: {1!r}'.format(custom_logo_file, e))
                return diff_lines
            if self.backup_suffix:
                backup_custom_logo_file = '{}{}'.format(custom_logo_file, self.backup_suffix)
            else:
                backup_custom_logo_file = '{}.old'.format(custom_logo_file)
            diff_lines = list(difflib.unified_diff(
                ['<PNG Image ({} bytes)>'.format(len(raw_custom_logo_data))],
                [],
                fromfile=backup_custom_logo_file,
                tofile=custom_logo_file,
                lineterm='',
            ))
            if not dry_run:
                if self.backup_suffix:
                    shutil.copy2(custom_logo_file, backup_custom_logo_file)
                os.remove(custom_logo_file)
        return diff_lines
    def _check_if_needs_comment(self, patterns, setting):
        files_to_comment = []
        # If any diffs are returned, this setting needs to be commented.
        diffs = comment_assignments(patterns, setting, dry_run=True)
        if setting == 'LICENSE':
            diffs.extend(self._comment_license_file(dry_run=True))
        elif setting == 'CUSTOM_LOGIN_INFO':
            diffs.extend(self._comment_local_settings_file(dry_run=True))
        elif setting == 'CUSTOM_LOGO':
            diffs.extend(self._comment_custom_logo_file(dry_run=True))
        for diff in diffs:
            for line in diff.splitlines():
                if line.startswith('+++ '):
                    files_to_comment.append(line[4:])
        return files_to_comment

    def _check_if_needs_migration(self, setting):
        # Check whether the current value differs from the default.
        default_value = settings.DEFAULTS_SNAPSHOT.get(setting, empty)
        if default_value is empty and setting != 'LICENSE':
            field = settings_registry.get_setting_field(setting, read_only=True)
            try:
                default_value = field.get_default()
            except SkipField:
                pass
        current_value = getattr(settings, setting, empty)
        if setting == 'CUSTOM_LOGIN_INFO' and current_value in {empty, ''}:
            local_settings_file = self._get_local_settings_file()
            try:
                if os.path.exists(local_settings_file):
                    local_settings = json.load(open(local_settings_file))
                    current_value = local_settings.get('custom_login_info', '')
            except Exception as e:
                if not self.skip_errors:
                    raise CommandError('Error reading custom login info from {0}: {1!r}'.format(local_settings_file, e))
        if setting == 'CUSTOM_LOGO' and current_value in {empty, ''}:
            custom_logo_file = self._get_custom_logo_file()
            try:
                if os.path.exists(custom_logo_file):
                    custom_logo_data = open(custom_logo_file).read()
                    if custom_logo_data:
                        current_value = 'data:image/png;base64,{}'.format(base64.b64encode(custom_logo_data))
                    else:
                        current_value = ''
            except Exception as e:
                if not self.skip_errors:
                    raise CommandError('Error reading custom logo from {0}: {1!r}'.format(custom_logo_file, e))
        if current_value != default_value:
            if current_value is empty:
                current_value = None
            return current_value
        return empty
    def _display_tbd(self, setting, files_to_comment, migrate_value, comment_error=None, migrate_error=None):
        if self.verbosity >= 1:
            if files_to_comment:
                if migrate_value is not empty:
                    action = 'Migrate + Comment'
                else:
                    action = 'Comment'
                if comment_error or migrate_error:
                    action = self.style.ERROR('{} (skipped)'.format(action))
                else:
                    action = self.style.OK(action)
                self.stdout.write(' {}: {}'.format(
                    self.style.LABEL(setting),
                    action,
                ))
                if self.verbosity >= 2:
                    if migrate_error:
                        self.stdout.write(' - Migrate value: {}'.format(
                            self.style.ERROR(migrate_error),
                        ))
                    elif migrate_value is not empty:
                        self.stdout.write(' - Migrate value: {}'.format(
                            self.style.VALUE(repr(migrate_value)),
                        ))
                    if comment_error:
                        self.stdout.write(' - Comment: {}'.format(
                            self.style.ERROR(comment_error),
                        ))
                    elif files_to_comment:
                        for file_to_comment in files_to_comment:
                            self.stdout.write(' - Comment in: {}'.format(
                                self.style.VALUE(file_to_comment),
                            ))
            else:
                if self.verbosity >= 2:
                    self.stdout.write(' {}: {}'.format(
                        self.style.LABEL(setting),
                        self.style.SKIP('No Migration'),
                    ))

    def _display_migrate(self, setting, action, display_value):
        if self.verbosity >= 1:
            if action == 'No Change':
                action = self.style.SKIP(action)
            else:
                action = self.style.OK(action)
            self.stdout.write(' {}: {}'.format(
                self.style.LABEL(setting),
                action,
            ))
            if self.verbosity >= 2:
                for line in display_value.splitlines():
                    self.stdout.write(' {}'.format(
                        self.style.VALUE(line),
                    ))

    def _display_diff_summary(self, filename, added, removed):
        self.stdout.write(' {} {}{} {}{}'.format(
            self.style.LABEL(filename),
            self.style.ERROR('-'),
            self.style.ERROR(str(removed)),
            self.style.OK('+'),
            self.style.OK(str(added)),
        ))

    def _display_comment(self, diffs):
        for diff in diffs:
            if self.verbosity >= 2:
                for line in diff.splitlines():
                    display_line = line
                    if line.startswith('--- ') or line.startswith('+++ '):
                        display_line = self.style.LABEL(line)
                    elif line.startswith('-'):
                        display_line = self.style.ERROR(line)
                    elif line.startswith('+'):
                        display_line = self.style.OK(line)
                    elif line.startswith('@@'):
                        display_line = self.style.VALUE(line)
                    self.stdout.write(' ' + display_line)
            elif self.verbosity >= 1:
                filename, lines_added, lines_removed = None, 0, 0
                for line in diff.splitlines():
                    if line.startswith('+++ '):
                        if filename:
                            self._display_diff_summary(filename, lines_added, lines_removed)
                        filename, lines_added, lines_removed = line[4:], 0, 0
                    elif line.startswith('+'):
                        lines_added += 1
                    elif line.startswith('-'):
                        lines_removed += 1
                if filename:
                    self._display_diff_summary(filename, lines_added, lines_removed)
    def _discover_settings(self, registered_settings):
        if self.verbosity >= 1:
            self.stdout.write(self.style.HEADING('Discovering settings to be migrated and commented:'))

        # Determine which settings need to be commented/migrated.
        to_migrate = collections.OrderedDict()
        to_comment = collections.OrderedDict()
        patterns = self._get_settings_file_patterns()

        for name in registered_settings:
            comment_error, migrate_error = None, None
            files_to_comment = []
            try:
                files_to_comment = self._check_if_needs_comment(patterns, name)
            except Exception as e:
                comment_error = 'Error commenting {0}: {1!r}'.format(name, e)
                if not self.skip_errors:
                    raise CommandError(comment_error)
            if files_to_comment:
                to_comment[name] = files_to_comment
            migrate_value = empty
            if files_to_comment:
                migrate_value = self._check_if_needs_migration(name)
            if migrate_value is not empty:
                field = settings_registry.get_setting_field(name)
                assert not field.read_only
                try:
                    data = field.to_representation(migrate_value)
                    setting_value = field.run_validation(data)
                    db_value = field.to_representation(setting_value)
                    to_migrate[name] = db_value
                except Exception as e:
                    to_comment.pop(name)
                    migrate_error = 'Unable to assign value {0!r} to setting "{1}: {2!s}".'.format(migrate_value, name, e)
                    if not self.skip_errors:
                        raise CommandError(migrate_error)
            self._display_tbd(name, files_to_comment, migrate_value, comment_error, migrate_error)
        if self.verbosity == 1 and not to_migrate and not to_comment:
            self.stdout.write(' No settings found to migrate or comment!')
        return (to_migrate, to_comment)

    def _migrate(self, to_migrate):
        if self.verbosity >= 1:
            if self.dry_run:
                self.stdout.write(self.style.HEADING('Migrating settings to database (dry-run):'))
            else:
                self.stdout.write(self.style.HEADING('Migrating settings to database:'))
            if not to_migrate:
                self.stdout.write(' No settings to migrate!')

        # Now migrate those settings to the database.
        for name, db_value in to_migrate.items():
            display_value = json.dumps(db_value, indent=4)
            setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
            action = 'No Change'
            if not setting:
                action = 'Migrated'
                if not self.dry_run:
                    Setting.objects.create(key=name, user=None, value=db_value)
            elif setting.value != db_value or type(setting.value) != type(db_value):
                action = 'Updated'
                if not self.dry_run:
                    setting.value = db_value
                    setting.save(update_fields=['value'])
            self._display_migrate(name, action, display_value)

    def _comment(self, to_comment):
        if self.verbosity >= 1:
            if bool(self.dry_run or self.no_comment):
                self.stdout.write(self.style.HEADING('Commenting settings in files (dry-run):'))
            else:
                self.stdout.write(self.style.HEADING('Commenting settings in files:'))
            if not to_comment:
                self.stdout.write(' No settings to comment!')

        # Now comment settings in settings files.
        if to_comment:
            to_comment_patterns = []
            license_file_to_comment = None
            local_settings_file_to_comment = None
            custom_logo_file_to_comment = None
            for files_to_comment in to_comment.values():
                for file_to_comment in files_to_comment:
                    if file_to_comment == self._get_license_file():
                        license_file_to_comment = file_to_comment
                    elif file_to_comment == self._get_local_settings_file():
                        local_settings_file_to_comment = file_to_comment
                    elif file_to_comment == self._get_custom_logo_file():
                        custom_logo_file_to_comment = file_to_comment
                    elif file_to_comment not in to_comment_patterns:
                        to_comment_patterns.append(file_to_comment)
            # Run once in dry-run mode to catch any errors from updating the files.
            diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=True, backup_suffix=self.backup_suffix)
            # Then, if really updating, run again.
            if not self.dry_run and not self.no_comment:
                diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=False, backup_suffix=self.backup_suffix)
                if license_file_to_comment:
                    diffs.extend(self._comment_license_file(dry_run=False))
                if local_settings_file_to_comment:
                    diffs.extend(self._comment_local_settings_file(dry_run=False))
                if custom_logo_file_to_comment:
                    diffs.extend(self._comment_custom_logo_file(dry_run=False))
            self._display_comment(diffs)

    def _migrate_settings(self, registered_settings):
        to_migrate, to_comment = self._discover_settings(registered_settings)

        if not bool(self.comment_only):
            self._migrate(to_migrate)
        self._comment(to_comment)
awx/conf/migrations/0006_v331_ldap_group_type.py  (new file, 18 lines)
@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# AWX
from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('conf', '0005_v330_rename_two_session_settings'),
    ]

    operations = [
        migrations.RunPython(fill_ldap_group_type_params),
    ]
awx/conf/migrations/_ldap_group_type.py  (new file, 30 lines)
@@ -0,0 +1,30 @@
import inspect

from django.conf import settings
from django.utils.timezone import now


def fill_ldap_group_type_params(apps, schema_editor):
    group_type = settings.AUTH_LDAP_GROUP_TYPE
    Setting = apps.get_model('conf', 'Setting')

    group_type_params = {'name_attr': 'cn', 'member_attr': 'member'}
    qs = Setting.objects.filter(key='AUTH_LDAP_GROUP_TYPE_PARAMS')
    entry = None
    if qs.exists():
        entry = qs[0]
        group_type_params = entry.value
    else:
        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS',
                        value=group_type_params,
                        created=now(),
                        modified=now())

    init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
    # Iterate over a copy of the keys: deleting from a dict while iterating
    # its live keys view raises a RuntimeError on Python 3.
    for k in list(group_type_params.keys()):
        if k not in init_attrs:
            del group_type_params[k]

    entry.value = group_type_params
    entry.save()
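# Effect sketch: for a configured group type whose __init__ accepts only
# name_attr (e.g. django-auth-ldap's ActiveDirectoryGroupType), the migration
# trims the stored parameters down to the accepted keyword arguments:
#
#   {'name_attr': 'cn', 'member_attr': 'member'}  ->  {'name_attr': 'cn'}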
@@ -1,7 +1,6 @@
 import base64
 import hashlib

-import six
 from django.utils.encoding import smart_str

 from cryptography.hazmat.backends import default_backend
@@ -91,7 +90,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=False):
     if skip_utf8:
         utf8 = False
     else:
-        utf8 = type(value) == six.text_type
+        utf8 = type(value) == str
     value = smart_str(value)
     key = get_encryption_key(field_name, getattr(instance, 'pk', None))
     encryptor = Cipher(AES(key), ECB(), default_backend()).encryptor()
@@ -33,7 +33,7 @@ class Setting(CreatedModifiedModel):
         on_delete=models.CASCADE,
     ))

-    def __unicode__(self):
+    def __str__(self):
         try:
             json_value = json.dumps(self.value)
         except ValueError:
@@ -1,8 +1,6 @@
 # Django REST Framework
 from rest_framework import serializers

-import six
-
 # Tower
 from awx.api.fields import VerbatimField
 from awx.api.serializers import BaseSerializer
@@ -47,12 +45,12 @@ class SettingFieldMixin(object):
     """Mixin to use a registered setting field class for API display/validation."""

     def to_representation(self, obj):
-        if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
+        if getattr(self, 'encrypted', False) and isinstance(obj, str) and obj:
             return '$encrypted$'
         return obj

     def to_internal_value(self, value):
-        if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+        if getattr(self, 'encrypted', False) and isinstance(value, str) and value.startswith('$encrypted$'):
             raise serializers.SkipField()
         obj = super(SettingFieldMixin, self).to_internal_value(value)
         return super(SettingFieldMixin, self).to_representation(obj)
@@ -6,18 +6,17 @@ import re
 import sys
 import threading
 import time
-import StringIO
 import traceback
-import urllib
-
-import six
+import urllib.parse
+from io import StringIO

 # Django
 from django.conf import LazySettings
 from django.conf import settings, UserSettingsHolder
 from django.core.cache import cache as django_cache
 from django.core.exceptions import ImproperlyConfigured
-from django.db import ProgrammingError, OperationalError, transaction, connection
+from django.db import transaction, connection
+from django.db.utils import Error as DBError
 from django.utils.functional import cached_property

 # Django REST Framework
@@ -67,7 +66,7 @@ def normalize_broker_url(value):
     match = re.search('(amqp://[^:]+:)(.*)', parts[0])
     if match:
         prefix, password = match.group(1), match.group(2)
-        parts[0] = prefix + urllib.quote(password)
+        parts[0] = prefix + urllib.parse.quote(password)
     return '@'.join(parts)
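# Example of the quoting above (hypothetical credentials; assumes the function
# splits the URL on the final '@' so that only the password segment reaches
# urllib.parse.quote()):
#
#   normalize_broker_url('amqp://guest:p@ss@rabbitmq:5672')
#   -> 'amqp://guest:p%40ss@rabbitmq:5672'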
@@ -90,21 +89,21 @@ def _ctit_db_wrapper(trans_safe=False):
             logger.debug('Obtaining database settings in spite of broken transaction.')
             transaction.set_rollback(False)
         yield
-    except (ProgrammingError, OperationalError):
+    except DBError:
         if 'migrate' in sys.argv and get_tower_migration_version() < '310':
             logger.info('Using default settings until version 3.1 migration.')
         else:
             # We want the _full_ traceback with the context
             # First we get the current call stack, which constitutes the "top",
             # it has the context up to the point where the context manager is used
-            top_stack = StringIO.StringIO()
+            top_stack = StringIO()
             traceback.print_stack(file=top_stack)
             top_lines = top_stack.getvalue().strip('\n').split('\n')
             top_stack.close()
             # Get "bottom" stack from the local error that happened
             # inside of the "with" block this wraps
             exc_type, exc_value, exc_traceback = sys.exc_info()
-            bottom_stack = StringIO.StringIO()
+            bottom_stack = StringIO()
             traceback.print_tb(exc_traceback, file=bottom_stack)
             bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
             # Glue together top and bottom where overlap is found
@@ -168,15 +167,6 @@ class EncryptedCacheProxy(object):
     def get(self, key, **kwargs):
         value = self.cache.get(key, **kwargs)
         value = self._handle_encryption(self.decrypter, key, value)
-
-        # python-memcached auto-encodes unicode on cache set in python2
-        # https://github.com/linsomniac/python-memcached/issues/79
-        # https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
-        if six.PY2 and isinstance(value, six.binary_type):
-            try:
-                six.text_type(value)
-            except UnicodeDecodeError:
-                value = value.decode('utf-8')
         logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
         return value
@@ -309,7 +299,7 @@ class SettingsWrapper(UserSettingsHolder):
         self.__dict__['_awx_conf_preload_expires'] = time.time() + SETTING_CACHE_TIMEOUT
         # Check for any settings that have been defined in Python files and
         # make those read-only to avoid overriding in the database.
-        if not self._awx_conf_init_readonly and 'migrate_to_database_settings' not in sys.argv:
+        if not self._awx_conf_init_readonly:
             defaults_snapshot = self._get_default('DEFAULTS_SNAPSHOT')
             for key in get_writeable_settings(self.registry):
                 init_default = defaults_snapshot.get(key, None)
@@ -9,15 +9,11 @@ from django.core.cache import cache
 from django.dispatch import receiver

 # Tower
+import awx.main.signals
 from awx.conf import settings_registry
 from awx.conf.models import Setting
 from awx.conf.serializers import SettingSerializer

 logger = logging.getLogger('awx.conf.signals')

+awx.main.signals.model_serializer_mapping[Setting] = SettingSerializer
+
 __all__ = []
@@ -1,7 +1,8 @@
+import urllib.parse
+
 import pytest

 from django.core.urlresolvers import resolve
-from django.utils.six.moves.urllib.parse import urlparse
 from django.contrib.auth.models import User

 from rest_framework.test import (
@@ -33,7 +34,7 @@ def admin():
 @pytest.fixture
 def api_request(admin):
     def rf(verb, url, data=None, user=admin):
-        view, view_args, view_kwargs = resolve(urlparse(url)[2])
+        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
         request = getattr(APIRequestFactory(), verb)(url, data=data, format='json')
         if user:
             force_authenticate(request, user=user)
@@ -1,5 +1,5 @@
import pytest
-import mock
+from unittest import mock

from rest_framework import serializers


@@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
import pytest
import mock

from django.apps import apps
from awx.conf.migrations._reencrypt import (
    replace_aesecb_fernet,
    encrypt_field,
    decrypt_field,
)
from awx.conf.settings import Setting
from awx.main.utils import decrypt_field as new_decrypt_field


@pytest.mark.django_db
@pytest.mark.parametrize("old_enc, new_enc, value", [
    ('$encrypted$UTF8$AES', '$encrypted$UTF8$AESCBC$', u'Iñtërnâtiônàlizætiøn'),
    ('$encrypted$AES$', '$encrypted$AESCBC$', 'test'),
])
def test_settings(old_enc, new_enc, value):
    with mock.patch('awx.conf.models.encrypt_field', encrypt_field):
        with mock.patch('awx.conf.settings.decrypt_field', decrypt_field):
            setting = Setting.objects.create(key='SOCIAL_AUTH_GITHUB_SECRET', value=value)
            assert setting.value.startswith(old_enc)

    replace_aesecb_fernet(apps, None)
    setting.refresh_from_db()

    assert setting.value.startswith(new_enc)
    assert new_decrypt_field(setting, 'value') == value

    # This is here for a side-effect.
    # Exception if the encryption type of AESCBC is not properly skipped, ensures
    # our `startswith` calls don't have typos
    replace_aesecb_fernet(apps, None)
@@ -1,7 +1,7 @@
import pytest

from rest_framework.fields import ValidationError
-from awx.conf.fields import StringListBooleanField, ListTuplesField
+from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField


class TestStringListBooleanField():
@@ -84,3 +84,49 @@ class TestListTuplesField():
        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
                                    "but got {} instead.".format(t)


class TestStringListPathField():

    FIELD_VALUES = [
        ((".", "..", "/"), [".", "..", "/"]),
        (("/home",), ["/home"]),
        (("///home///",), ["/home"]),
        (("/home/././././",), ["/home"]),
        (("/home", "/home", "/home/"), ["/home"]),
        (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"])
    ]

    FIELD_VALUES_INVALID_TYPE = [
        1.245,
        {"a": "b"},
        ("/home"),
    ]

    FIELD_VALUES_INVALID_PATH = [
        "",
        "~/",
        "home",
        "/invalid_path",
        "/home/invalid_path",
    ]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
        field = StringListPathField()
        v = field.to_internal_value(value_in)
        assert v == value_known

    @pytest.mark.parametrize("value", FIELD_VALUES_INVALID_TYPE)
    def test_to_internal_value_invalid_type(self, value):
        field = StringListPathField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
        assert e.value.detail[0] == "Expected list of strings but got {} instead.".format(type(value))

    @pytest.mark.parametrize("value", FIELD_VALUES_INVALID_PATH)
    def test_to_internal_value_invalid_path(self, value):
        field = StringListPathField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value([value])
        assert e.value.detail[0] == "{} is not a valid path choice.".format(value)

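A hedged reading of what these fixtures pin down (the actual StringListPathField implementation is not part of this diff): paths are normalized and de-duplicated in order, and anything that does not name an existing path is rejected. A minimal sketch of just the normalization step, with normalize_paths as a hypothetical helper name:

import os

def normalize_paths(paths):
    seen, result = set(), []
    for p in paths:
        p = os.path.normpath(p)  # '///home///' and '/home/./././' both become '/home'
        if p not in seen:
            seen.add(p)
            result.append(p)
    return result

The real field would additionally validate each normalized entry against the filesystem, which is what makes '/invalid_path' fail above.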
@@ -4,6 +4,7 @@
# All Rights Reserved.

from contextlib import contextmanager
+import codecs
from uuid import uuid4
import time

@@ -12,7 +13,6 @@ from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
import pytest
-import six

from awx.conf import models, fields
from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET
@@ -67,9 +67,9 @@ def test_cached_settings_unicode_is_auto_decoded(settings):
    # https://github.com/linsomniac/python-memcached/issues/79
    # https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961

-   value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')  # this simulates what python-memcached does on cache.set()
+   value = 'Iñtërnâtiônàlizætiøn'  # this simulates what python-memcached does on cache.set()
    settings.cache.set('DEBUG', value)
-   assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
+   assert settings.cache.get('DEBUG') == 'Iñtërnâtiônàlizætiøn'


def test_read_only_setting(settings):
@@ -262,7 +262,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
        encrypted=encrypted
    )
    # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
-   value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
+   value = 'Iñtërnâtiônàlizætiøn'

    setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)
    mocks = mocker.Mock(**{
@@ -272,8 +272,8 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
        }),
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-       assert settings.AWX_SOME_SETTING == six.u('Iñtërnâtiônàlizætiøn')
-       assert settings.cache.get('AWX_SOME_SETTING') == six.u('Iñtërnâtiônàlizætiøn')
+       assert settings.AWX_SOME_SETTING == 'Iñtërnâtiônàlizætiøn'
+       assert settings.cache.get('AWX_SOME_SETTING') == 'Iñtërnâtiônàlizætiøn'


@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -434,7 +434,7 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):

    def rot13(obj, attribute):
        assert obj.pk == 123
-       return getattr(obj, attribute).encode('rot13')
+       return codecs.encode(getattr(obj, attribute), 'rot_13')

    native_cache = LocMemCache(str(uuid4()), {})
    cache = EncryptedCacheProxy(
@@ -471,7 +471,7 @@ def test_readonly_sensitive_cache_data_is_encrypted(settings):

    def rot13(obj, attribute):
        assert obj.pk is None
-       return getattr(obj, attribute).encode('rot13')
+       return codecs.encode(getattr(obj, attribute), 'rot_13')

    native_cache = LocMemCache(str(uuid4()), {})
    cache = EncryptedCacheProxy(

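The rot13 swap above is a Python 3 detail worth calling out: text-to-text codecs were removed from str.encode(), so rot13 now lives only behind codecs.encode. A quick check:

import codecs

# str.encode('rot13') raises LookupError on Python 3; the codecs module
# still exposes the text-to-text transform under the name 'rot_13'.
assert codecs.encode('attack at dawn', 'rot_13') == 'nggnpx ng qnja'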
@@ -1,110 +1,9 @@
#!/usr/bin/env python

# Python
import difflib
import glob
import os
import shutil

import six

# AWX
from awx.conf.registry import settings_registry

-__all__ = ['comment_assignments', 'conf_to_dict']


def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
    if isinstance(patterns, six.string_types):
        patterns = [patterns]
    diffs = []
    for pattern in patterns:
        for filename in sorted(glob.glob(pattern)):
            filename = os.path.abspath(os.path.normpath(filename))
            if backup_suffix:
                backup_filename = '{}{}'.format(filename, backup_suffix)
            else:
                backup_filename = None
            diff = comment_assignments_in_file(filename, assignment_names, dry_run, backup_filename)
            if diff:
                diffs.append(diff)
    return diffs


def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
    from redbaron import RedBaron, indent

    if isinstance(assignment_names, six.string_types):
        assignment_names = [assignment_names]
    else:
        assignment_names = assignment_names[:]
    current_file_data = open(filename).read()

    for assignment_name in assignment_names[:]:
        if assignment_name in current_file_data:
            continue
        if assignment_name in assignment_names:
            assignment_names.remove(assignment_name)
    if not assignment_names:
        return ''

    replace_lines = {}
    rb = RedBaron(current_file_data)
    for assignment_node in rb.find_all('assignment'):
        for assignment_name in assignment_names:

            # Only target direct assignments to a variable.
            name_node = assignment_node.find('name', value=assignment_name)
            if not name_node:
                continue
            if assignment_node.target.type != 'name':
                continue

            # Build a new node that comments out the existing assignment node.
            indentation = '{}# '.format(assignment_node.indentation or '')
            new_node_content = indent(assignment_node.dumps(), indentation)
            new_node_lines = new_node_content.splitlines()
            # Add a pass statement in case the assignment block is the only
            # child in a parent code block to prevent a syntax error.
            if assignment_node.indentation:
                new_node_lines[0] = new_node_lines[0].replace(indentation, '{}pass  # '.format(assignment_node.indentation or ''), 1)
            new_node_lines[0] = '{0}This setting is now configured via the Tower API.\n{1}'.format(indentation, new_node_lines[0])

            # Store new node lines in dictionary to be replaced in file.
            start_lineno = assignment_node.absolute_bounding_box.top_left.line
            end_lineno = assignment_node.absolute_bounding_box.bottom_right.line
            for n, new_node_line in enumerate(new_node_lines):
                new_lineno = start_lineno + n
                assert new_lineno <= end_lineno
                replace_lines[new_lineno] = new_node_line

    if not replace_lines:
        return ''

    # Iterate through all lines in current file and replace as needed.
    current_file_lines = current_file_data.splitlines()
    new_file_lines = []
    for n, line in enumerate(current_file_lines):
        new_file_lines.append(replace_lines.get(n + 1, line))
    new_file_data = '\n'.join(new_file_lines)
    new_file_lines = new_file_data.splitlines()

    # If changed, syntax check and write the new file; return a diff of changes.
    diff_lines = []
    if new_file_data != current_file_data:
        compile(new_file_data, filename, 'exec')
        if backup_filename:
            from_file = backup_filename
        else:
            from_file = '{}.old'.format(filename)
        to_file = filename
        diff_lines = list(difflib.unified_diff(current_file_lines, new_file_lines, fromfile=from_file, tofile=to_file, lineterm=''))
        if not dry_run:
            if backup_filename:
                shutil.copy2(filename, backup_filename)
            with open(filename, 'wb') as fileobj:
                fileobj.write(new_file_data)
    return '\n'.join(diff_lines)
+__all__ = ['conf_to_dict']


def conf_to_dict(obj):
@@ -112,10 +11,3 @@ def conf_to_dict(obj):
        'category': settings_registry.get_setting_category(obj.key),
        'name': obj.key,
    }


if __name__ == '__main__':
    pattern = os.path.join(os.path.dirname(__file__), '..', 'settings', 'local_*.py')
    diffs = comment_assignments(pattern, ['AUTH_LDAP_ORGANIZATION_MAP'])
    for diff in diffs:
        print(diff)

@@ -17,10 +17,15 @@ from rest_framework import serializers
from rest_framework import status

# Tower
-from awx.api.generics import *  # noqa
+from awx.api.generics import (
+    APIView,
+    GenericAPIView,
+    ListAPIView,
+    RetrieveUpdateDestroyAPIView,
+)
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse, get_request_version
-from awx.main.utils import *  # noqa
+from awx.main.utils import camelcase_to_underscore
from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.license import get_licensed_features
@@ -72,7 +77,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

    def get_queryset(self):
        self.category_slug = self.kwargs.get('category_slug', 'all')
-       all_category_slugs = settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys()
+       all_category_slugs = list(settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys())
        for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
            all_category_slugs.remove(slug_to_delete)
        if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
@@ -123,7 +128,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
            if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
                continue
            if settings_registry.is_setting_encrypted(key) and \
-                   isinstance(value, basestring) and \
+                   isinstance(value, str) and \
                    value.startswith('$encrypted$'):
                continue
            setattr(serializer.instance, key, value)
@@ -135,7 +140,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
                setting.value = value
                setting.save(update_fields=['value'])
            settings_change_list.append(key)
-       if settings_change_list and 'migrate_to_database_settings' not in sys.argv:
+       if settings_change_list:
            handle_setting_changes.delay(settings_change_list)

    def destroy(self, request, *args, **kwargs):
@@ -150,7 +155,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
                continue
            setting.delete()
            settings_change_list.append(setting.key)
-       if settings_change_list and 'migrate_to_database_settings' not in sys.argv:
+       if settings_change_list:
            handle_setting_changes.delay(settings_change_list)

    # When TOWER_URL_BASE is deleted from the API, reset it to the hostname
@@ -210,7 +215,7 @@ class SettingLoggingTest(GenericAPIView):
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
this_module = sys.modules[__name__]
-for attr, value in locals().items():
+for attr, value in list(locals().items()):
    if isinstance(value, type) and issubclass(value, APIView):
        name = camelcase_to_underscore(attr)
        view = value.as_view()

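The list(...) wrappers added above are the standard Python 3 fix: dict.keys() returns a live view, which has no remove() method and only reflects the dict it came from. A small illustration:

registry = {'system': 1, 'logging': 2, 'ui': 3}

# A dict view has no .remove(); materialize it into a list first,
# exactly as get_queryset() now does with the category slugs.
slugs = list(registry.keys())
slugs.remove('ui')           # fine on the list copy
assert 'ui' in registry      # the dict itself is untouched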
@@ -1,25 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# AWX Display Callback
from . import cleanup  # noqa (registers control persistent cleanup)
from . import display  # noqa (wraps ansible.display.Display methods)
from .module import AWXDefaultCallbackModule, AWXMinimalCallbackModule

__all__ = ['AWXDefaultCallbackModule', 'AWXMinimalCallbackModule']
@@ -1,85 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import atexit
import glob
import os
import pwd

# PSUtil
try:
    import psutil
except ImportError:
    raise ImportError('psutil is missing; {}bin/pip install psutil'.format(
        os.environ['VIRTUAL_ENV']
    ))

__all__ = []

main_pid = os.getpid()


@atexit.register
def terminate_ssh_control_masters():
    # Only run this cleanup from the main process.
    if os.getpid() != main_pid:
        return
    # Determine if control persist is being used and if any open sockets
    # exist after running the playbook.
    cp_path = os.environ.get('ANSIBLE_SSH_CONTROL_PATH', '')
    if not cp_path:
        return
    cp_dir = os.path.dirname(cp_path)
    if not os.path.exists(cp_dir):
        return
    cp_pattern = os.path.join(cp_dir, 'ansible-ssh-*')
    cp_files = glob.glob(cp_pattern)
    if not cp_files:
        return

    # Attempt to find any running control master processes.
    username = pwd.getpwuid(os.getuid())[0]
    ssh_cm_procs = []
    for proc in psutil.process_iter():
        try:
            pname = proc.name()
            pcmdline = proc.cmdline()
            pusername = proc.username()
        except psutil.NoSuchProcess:
            continue
        if pusername != username:
            continue
        if pname != 'ssh':
            continue
        for cp_file in cp_files:
            if pcmdline and cp_file in pcmdline[0]:
                ssh_cm_procs.append(proc)
                break

    # Terminate then kill control master processes. Workaround older
    # version of psutil that may not have wait_procs implemented.
    for proc in ssh_cm_procs:
        try:
            proc.terminate()
        except psutil.NoSuchProcess:
            continue
    procs_gone, procs_alive = psutil.wait_procs(ssh_cm_procs, timeout=5)
    for proc in procs_alive:
        proc.kill()
@@ -1,98 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import functools
import sys
import uuid

# Ansible
from ansible.utils.display import Display

# Tower Display Callback
from .events import event_context

__all__ = []


def with_context(**context):
    global event_context

    def wrap(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            with event_context.set_local(**context):
                return f(*args, **kwargs)
        return wrapper
    return wrap


for attr in dir(Display):
    if attr.startswith('_') or 'cow' in attr or 'prompt' in attr:
        continue
    if attr in ('display', 'v', 'vv', 'vvv', 'vvvv', 'vvvvv', 'vvvvvv', 'verbose'):
        continue
    if not callable(getattr(Display, attr)):
        continue
    setattr(Display, attr, with_context(**{attr: True})(getattr(Display, attr)))


def with_verbosity(f):
    global event_context

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        host = args[2] if len(args) >= 3 else kwargs.get('host', None)
        caplevel = args[3] if len(args) >= 4 else kwargs.get('caplevel', 2)
        context = dict(verbose=True, verbosity=(caplevel + 1))
        if host is not None:
            context['remote_addr'] = host
        with event_context.set_local(**context):
            return f(*args, **kwargs)
    return wrapper


Display.verbose = with_verbosity(Display.verbose)


def display_with_context(f):

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        log_only = args[5] if len(args) >= 6 else kwargs.get('log_only', False)
        stderr = args[3] if len(args) >= 4 else kwargs.get('stderr', False)
        event_uuid = event_context.get().get('uuid', None)
        with event_context.display_lock:
            # If writing only to a log file or there is already an event UUID
            # set (from a callback module method), skip dumping the event data.
            if log_only or event_uuid:
                return f(*args, **kwargs)
            try:
                fileobj = sys.stderr if stderr else sys.stdout
                event_context.add_local(uuid=str(uuid.uuid4()))
                event_context.dump_begin(fileobj)
                return f(*args, **kwargs)
            finally:
                event_context.dump_end(fileobj)
                event_context.remove_local(uuid=None)

    return wrapper


Display.display = display_with_context(Display.display)
@@ -1,189 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import base64
import contextlib
import datetime
import json
import multiprocessing
import os
import stat
import threading
import uuid

try:
    import memcache
except ImportError:
    raise ImportError('python-memcached is missing; {}bin/pip install python-memcached'.format(
        os.environ['VIRTUAL_ENV']
    ))

from six.moves import xrange

__all__ = ['event_context']


class IsolatedFileWrite:
    '''
    Stand-in class that will write partial event data to a file as a
    replacement for memcache when a job is running on an isolated host.
    '''

    def __init__(self):
        self.private_data_dir = os.getenv('AWX_ISOLATED_DATA_DIR')

    def set(self, key, value):
        # Strip off the leading memcache key identifying characters :1:ev-
        event_uuid = key[len(':1:ev-'):]
        # Write data in a staging area and then atomic move to pickup directory
        filename = '{}-partial.json'.format(event_uuid)
        dropoff_location = os.path.join(self.private_data_dir, 'artifacts', 'job_events', filename)
        write_location = '.'.join([dropoff_location, 'tmp'])
        partial_data = json.dumps(value)
        with os.fdopen(os.open(write_location, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
            f.write(partial_data)
        os.rename(write_location, dropoff_location)


class EventContext(object):
    '''
    Store global and local (per thread/process) data associated with callback
    events and other display output methods.
    '''

    def __init__(self):
        self.display_lock = multiprocessing.RLock()
        cache_actual = os.getenv('CACHE', '127.0.0.1:11211')
        if os.getenv('AWX_ISOLATED_DATA_DIR', False):
            self.cache = IsolatedFileWrite()
        else:
            self.cache = memcache.Client([cache_actual], debug=0)

    def add_local(self, **kwargs):
        if not hasattr(self, '_local'):
            self._local = threading.local()
            self._local._ctx = {}
        self._local._ctx.update(kwargs)

    def remove_local(self, **kwargs):
        if hasattr(self, '_local'):
            for key in kwargs.keys():
                self._local._ctx.pop(key, None)

    @contextlib.contextmanager
    def set_local(self, **kwargs):
        try:
            self.add_local(**kwargs)
            yield
        finally:
            self.remove_local(**kwargs)

    def get_local(self):
        return getattr(getattr(self, '_local', None), '_ctx', {})

    def add_global(self, **kwargs):
        if not hasattr(self, '_global_ctx'):
            self._global_ctx = {}
        self._global_ctx.update(kwargs)

    def remove_global(self, **kwargs):
        if hasattr(self, '_global_ctx'):
            for key in kwargs.keys():
                self._global_ctx.pop(key, None)

    @contextlib.contextmanager
    def set_global(self, **kwargs):
        try:
            self.add_global(**kwargs)
            yield
        finally:
            self.remove_global(**kwargs)

    def get_global(self):
        return getattr(self, '_global_ctx', {})

    def get(self):
        ctx = {}
        ctx.update(self.get_global())
        ctx.update(self.get_local())
        return ctx

    def get_begin_dict(self):
        event_data = self.get()
        if os.getenv('JOB_ID', ''):
            event_data['job_id'] = int(os.getenv('JOB_ID', '0'))
        if os.getenv('AD_HOC_COMMAND_ID', ''):
            event_data['ad_hoc_command_id'] = int(os.getenv('AD_HOC_COMMAND_ID', '0'))
        if os.getenv('PROJECT_UPDATE_ID', ''):
            event_data['project_update_id'] = int(os.getenv('PROJECT_UPDATE_ID', '0'))
        event_data.setdefault('pid', os.getpid())
        event_data.setdefault('uuid', str(uuid.uuid4()))
        event_data.setdefault('created', datetime.datetime.utcnow().isoformat())
        if not event_data.get('parent_uuid', None) and event_data.get('job_id', None):
            for key in ('task_uuid', 'play_uuid', 'playbook_uuid'):
                parent_uuid = event_data.get(key, None)
                if parent_uuid and parent_uuid != event_data.get('uuid', None):
                    event_data['parent_uuid'] = parent_uuid
                    break

        event = event_data.pop('event', None)
        if not event:
            event = 'verbose'
            for key in ('debug', 'verbose', 'deprecated', 'warning', 'system_warning', 'error'):
                if event_data.get(key, False):
                    event = key
                    break
        max_res = int(os.getenv("MAX_EVENT_RES", 700000))
        if event not in ('playbook_on_stats',) and "res" in event_data and len(str(event_data['res'])) > max_res:
            event_data['res'] = {}
        event_dict = dict(event=event, event_data=event_data)
        for key in event_data.keys():
            if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
                event_dict[key] = event_data.pop(key)
            elif key in ('verbosity', 'pid'):
                event_dict[key] = event_data[key]
        return event_dict

    def get_end_dict(self):
        return {}

    def dump(self, fileobj, data, max_width=78, flush=False):
        b64data = base64.b64encode(json.dumps(data))
        with self.display_lock:
            # pattern corresponding to OutputEventFilter expectation
            fileobj.write(u'\x1b[K')
            for offset in xrange(0, len(b64data), max_width):
                chunk = b64data[offset:offset + max_width]
                escaped_chunk = u'{}\x1b[{}D'.format(chunk, len(chunk))
                fileobj.write(escaped_chunk)
            fileobj.write(u'\x1b[K')
            if flush:
                fileobj.flush()

    def dump_begin(self, fileobj):
        begin_dict = self.get_begin_dict()
        self.cache.set(":1:ev-{}".format(begin_dict['uuid']), begin_dict)
        self.dump(fileobj, {'uuid': begin_dict['uuid']})

    def dump_end(self, fileobj):
        self.dump(fileobj, self.get_end_dict(), flush=True)


event_context = EventContext()
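A hedged sketch of the consumer side of the dump() protocol above (extract_event is a hypothetical name, not AWX's OutputEventFilter): the event payload is base64-encoded JSON, chunked and wrapped in ANSI "erase line" (\x1b[K) and "cursor back" (\x1b[nD) escapes, so a plain terminal renders nothing while a filter can strip the escapes and reassemble the JSON.

import base64
import json
import re

def extract_event(stream_text):
    # Drop the \x1b[K and \x1b[<n>D escape codes, leaving the base64 body.
    body = re.sub(r'\x1b\[\d*[KD]', '', stream_text)
    return json.loads(base64.b64decode(body))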
@@ -1,29 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import os

# Ansible
import ansible

# Because of the way Ansible loads plugins, it's not possible to import
# ansible.plugins.callback.minimal when being loaded as the minimal plugin. Ugh.
with open(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py')) as in_file:
    exec(in_file.read())
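The exec() trick above predates it in this codebase, but the same "load a module straight from a file path" effect is usually written with importlib today; a sketch for comparison, not what the deleted file did:

import importlib.util
import os

import ansible

# Load ansible's bundled minimal callback directly from its file path,
# sidestepping the plugin-loader import restriction mentioned above.
path = os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py')
spec = importlib.util.spec_from_file_location('awx_minimal_callback', path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
CallbackModule = module.CallbackModule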
@@ -1,492 +0,0 @@
# Copyright (c) 2016 Ansible by Red Hat, Inc.
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import codecs
import contextlib
import json
import os
import stat
import sys
import uuid
from copy import copy

# Ansible
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.plugins.callback.default import CallbackModule as DefaultCallbackModule

# AWX Display Callback
from .events import event_context
from .minimal import CallbackModule as MinimalCallbackModule

CENSORED = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"  # noqa


class BaseCallbackModule(CallbackBase):
    '''
    Callback module for logging ansible/ansible-playbook events.
    '''

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'

    # These events should never have an associated play.
    EVENTS_WITHOUT_PLAY = [
        'playbook_on_start',
        'playbook_on_stats',
    ]

    # These events should never have an associated task.
    EVENTS_WITHOUT_TASK = EVENTS_WITHOUT_PLAY + [
        'playbook_on_setup',
        'playbook_on_notify',
        'playbook_on_import_for_host',
        'playbook_on_not_import_for_host',
        'playbook_on_no_hosts_matched',
        'playbook_on_no_hosts_remaining',
    ]

    def __init__(self):
        super(BaseCallbackModule, self).__init__()
        self.task_uuids = set()

    @contextlib.contextmanager
    def capture_event_data(self, event, **event_data):
        event_data.setdefault('uuid', str(uuid.uuid4()))

        if event not in self.EVENTS_WITHOUT_TASK:
            task = event_data.pop('task', None)
        else:
            task = None

        if event_data.get('res'):
            if event_data['res'].get('_ansible_no_log', False):
                event_data['res'] = {'censored': CENSORED}
            if event_data['res'].get('results', []):
                event_data['res']['results'] = copy(event_data['res']['results'])
            for i, item in enumerate(event_data['res'].get('results', [])):
                if isinstance(item, dict) and item.get('_ansible_no_log', False):
                    event_data['res']['results'][i] = {'censored': CENSORED}

        with event_context.display_lock:
            try:
                event_context.add_local(event=event, **event_data)
                if task:
                    self.set_task(task, local=True)
                event_context.dump_begin(sys.stdout)
                yield
            finally:
                event_context.dump_end(sys.stdout)
                if task:
                    self.clear_task(local=True)
                event_context.remove_local(event=None, **event_data)

    def set_playbook(self, playbook):
        # NOTE: Ansible doesn't generate a UUID for playbook_on_start so do it for them.
        self.playbook_uuid = str(uuid.uuid4())
        file_name = getattr(playbook, '_file_name', '???')
        event_context.add_global(playbook=file_name, playbook_uuid=self.playbook_uuid)
        self.clear_play()

    def set_play(self, play):
        if hasattr(play, 'hosts'):
            if isinstance(play.hosts, list):
                pattern = ','.join(play.hosts)
            else:
                pattern = play.hosts
        else:
            pattern = ''
        name = play.get_name().strip() or pattern
        event_context.add_global(play=name, play_uuid=str(play._uuid), play_pattern=pattern)
        self.clear_task()

    def clear_play(self):
        event_context.remove_global(play=None, play_uuid=None, play_pattern=None)
        self.clear_task()

    def set_task(self, task, local=False):
        # FIXME: Task is "global" unless using free strategy!
        task_ctx = dict(
            task=(task.name or task.action),
            task_uuid=str(task._uuid),
            task_action=task.action,
            task_args='',
        )
        try:
            task_ctx['task_path'] = task.get_path()
        except AttributeError:
            pass

        if C.DISPLAY_ARGS_TO_STDOUT:
            if task.no_log:
                task_ctx['task_args'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
            else:
                task_args = ', '.join(('%s=%s' % a for a in task.args.items()))
                task_ctx['task_args'] = task_args
        if getattr(task, '_role', None):
            task_role = task._role._role_name
        else:
            task_role = getattr(task, 'role_name', '')
        if task_role:
            task_ctx['role'] = task_role
        if local:
            event_context.add_local(**task_ctx)
        else:
            event_context.add_global(**task_ctx)

    def clear_task(self, local=False):
        task_ctx = dict(task=None, task_path=None, task_uuid=None, task_action=None, task_args=None, role=None)
        if local:
            event_context.remove_local(**task_ctx)
        else:
            event_context.remove_global(**task_ctx)

    def v2_playbook_on_start(self, playbook):
        self.set_playbook(playbook)
        event_data = dict(
            uuid=self.playbook_uuid,
        )
        with self.capture_event_data('playbook_on_start', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_start(playbook)

    def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None,
                                   encrypt=None, confirm=False, salt_size=None,
                                   salt=None, default=None):
        event_data = dict(
            varname=varname,
            private=private,
            prompt=prompt,
            encrypt=encrypt,
            confirm=confirm,
            salt_size=salt_size,
            salt=salt,
            default=default,
        )
        with self.capture_event_data('playbook_on_vars_prompt', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_vars_prompt(
                varname, private, prompt, encrypt, confirm, salt_size, salt,
                default,
            )

    def v2_playbook_on_include(self, included_file):
        event_data = dict(
            included_file=included_file._filename if included_file is not None else None,
        )
        with self.capture_event_data('playbook_on_include', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_include(included_file)

    def v2_playbook_on_play_start(self, play):
        self.set_play(play)
        if hasattr(play, 'hosts'):
            if isinstance(play.hosts, list):
                pattern = ','.join(play.hosts)
            else:
                pattern = play.hosts
        else:
            pattern = ''
        name = play.get_name().strip() or pattern
        event_data = dict(
            name=name,
            pattern=pattern,
            uuid=str(play._uuid),
        )
        with self.capture_event_data('playbook_on_play_start', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_play_start(play)

    def v2_playbook_on_import_for_host(self, result, imported_file):
        # NOTE: Not used by Ansible 2.x.
        with self.capture_event_data('playbook_on_import_for_host'):
            super(BaseCallbackModule, self).v2_playbook_on_import_for_host(result, imported_file)

    def v2_playbook_on_not_import_for_host(self, result, missing_file):
        # NOTE: Not used by Ansible 2.x.
        with self.capture_event_data('playbook_on_not_import_for_host'):
            super(BaseCallbackModule, self).v2_playbook_on_not_import_for_host(result, missing_file)

    def v2_playbook_on_setup(self):
        # NOTE: Not used by Ansible 2.x.
        with self.capture_event_data('playbook_on_setup'):
            super(BaseCallbackModule, self).v2_playbook_on_setup()

    def v2_playbook_on_task_start(self, task, is_conditional):
        # FIXME: Flag task path output as vv.
        task_uuid = str(task._uuid)
        if task_uuid in self.task_uuids:
            # FIXME: When this task UUID repeats, it means the play is using the
            # free strategy, so different hosts may be running different tasks
            # within a play.
            return
        self.task_uuids.add(task_uuid)
        self.set_task(task)
        event_data = dict(
            task=task,
            name=task.get_name(),
            is_conditional=is_conditional,
            uuid=task_uuid,
        )
        with self.capture_event_data('playbook_on_task_start', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_task_start(task, is_conditional)

    def v2_playbook_on_cleanup_task_start(self, task):
        # NOTE: Not used by Ansible 2.x.
        self.set_task(task)
        event_data = dict(
            task=task,
            name=task.get_name(),
            uuid=str(task._uuid),
            is_conditional=True,
        )
        with self.capture_event_data('playbook_on_task_start', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_cleanup_task_start(task)

    def v2_playbook_on_handler_task_start(self, task):
        # NOTE: Re-using playbook_on_task_start event for this v2-specific
        # event, but setting is_conditional=True, which is how v1 identified a
        # task run as a handler.
        self.set_task(task)
        event_data = dict(
            task=task,
            name=task.get_name(),
            uuid=str(task._uuid),
            is_conditional=True,
        )
        with self.capture_event_data('playbook_on_task_start', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_handler_task_start(task)

    def v2_playbook_on_no_hosts_matched(self):
        with self.capture_event_data('playbook_on_no_hosts_matched'):
            super(BaseCallbackModule, self).v2_playbook_on_no_hosts_matched()

    def v2_playbook_on_no_hosts_remaining(self):
        with self.capture_event_data('playbook_on_no_hosts_remaining'):
            super(BaseCallbackModule, self).v2_playbook_on_no_hosts_remaining()

    def v2_playbook_on_notify(self, handler, host):
        # NOTE: Not used by Ansible < 2.5.
        event_data = dict(
            host=host.get_name(),
            handler=handler.get_name(),
        )
        with self.capture_event_data('playbook_on_notify', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_notify(handler, host)

    '''
    ansible_stats was, retroactively, added in 2.2
    '''
    def v2_playbook_on_stats(self, stats):
        self.clear_play()
        # FIXME: Add count of plays/tasks.
        event_data = dict(
            changed=stats.changed,
            dark=stats.dark,
            failures=stats.failures,
            ok=stats.ok,
            processed=stats.processed,
            skipped=stats.skipped
        )

        # write custom set_stat artifact data to the local disk so that it can
        # be persisted by awx after the process exits
        custom_artifact_data = stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
        if custom_artifact_data:
            # create the directory for custom stats artifacts to live in (if it doesn't exist)
            custom_artifacts_dir = os.path.join(os.getenv('AWX_PRIVATE_DATA_DIR'), 'artifacts')
            if not os.path.isdir(custom_artifacts_dir):
                os.makedirs(custom_artifacts_dir, mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)

            custom_artifacts_path = os.path.join(custom_artifacts_dir, 'custom')
            with codecs.open(custom_artifacts_path, 'w', encoding='utf-8') as f:
                os.chmod(custom_artifacts_path, stat.S_IRUSR | stat.S_IWUSR)
                json.dump(custom_artifact_data, f)

        with self.capture_event_data('playbook_on_stats', **event_data):
            super(BaseCallbackModule, self).v2_playbook_on_stats(stats)

    @staticmethod
    def _get_event_loop(task):
        if hasattr(task, 'loop_with'):  # Ansible >=2.5
            return task.loop_with
        elif hasattr(task, 'loop'):  # Ansible <2.4
            return task.loop
        return None

    def v2_runner_on_ok(self, result):
        # FIXME: Display detailed results or not based on verbosity.

        # strip environment vars from the job event; it already exists on the
        # job and sensitive values are filtered there
        if result._task.action in ('setup', 'gather_facts'):
            result._result.get('ansible_facts', {}).pop('ansible_env', None)

        event_data = dict(
            host=result._host.get_name(),
            remote_addr=result._host.address,
            task=result._task,
            res=result._result,
            event_loop=self._get_event_loop(result._task),
        )
        with self.capture_event_data('runner_on_ok', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_ok(result)

    def v2_runner_on_failed(self, result, ignore_errors=False):
        # FIXME: Add verbosity for exception/results output.
        event_data = dict(
            host=result._host.get_name(),
            remote_addr=result._host.address,
            res=result._result,
            task=result._task,
            ignore_errors=ignore_errors,
            event_loop=self._get_event_loop(result._task),
        )
        with self.capture_event_data('runner_on_failed', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_failed(result, ignore_errors)

    def v2_runner_on_skipped(self, result):
        event_data = dict(
            host=result._host.get_name(),
            remote_addr=result._host.address,
            task=result._task,
            event_loop=self._get_event_loop(result._task),
        )
        with self.capture_event_data('runner_on_skipped', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_skipped(result)

    def v2_runner_on_unreachable(self, result):
        event_data = dict(
            host=result._host.get_name(),
            remote_addr=result._host.address,
            task=result._task,
            res=result._result,
        )
        with self.capture_event_data('runner_on_unreachable', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_unreachable(result)

    def v2_runner_on_no_hosts(self, task):
        # NOTE: Not used by Ansible 2.x.
        event_data = dict(
            task=task,
        )
        with self.capture_event_data('runner_on_no_hosts', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_no_hosts(task)

    def v2_runner_on_async_poll(self, result):
        # NOTE: Not used by Ansible 2.x.
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
            jid=result._result.get('ansible_job_id'),
        )
        with self.capture_event_data('runner_on_async_poll', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_async_poll(result)

    def v2_runner_on_async_ok(self, result):
        # NOTE: Not used by Ansible 2.x.
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
            jid=result._result.get('ansible_job_id'),
        )
        with self.capture_event_data('runner_on_async_ok', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_async_ok(result)

    def v2_runner_on_async_failed(self, result):
        # NOTE: Not used by Ansible 2.x.
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
            jid=result._result.get('ansible_job_id'),
        )
        with self.capture_event_data('runner_on_async_failed', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_async_failed(result)

    def v2_runner_on_file_diff(self, result, diff):
        # NOTE: Not used by Ansible 2.x.
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            diff=diff,
        )
        with self.capture_event_data('runner_on_file_diff', **event_data):
            super(BaseCallbackModule, self).v2_runner_on_file_diff(result, diff)

    def v2_on_file_diff(self, result):
        # NOTE: Logged as runner_on_file_diff.
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            diff=result._result.get('diff'),
        )
        with self.capture_event_data('runner_on_file_diff', **event_data):
            super(BaseCallbackModule, self).v2_on_file_diff(result)

    def v2_runner_item_on_ok(self, result):
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
        )
        with self.capture_event_data('runner_item_on_ok', **event_data):
            super(BaseCallbackModule, self).v2_runner_item_on_ok(result)

    def v2_runner_item_on_failed(self, result):
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
        )
        with self.capture_event_data('runner_item_on_failed', **event_data):
            super(BaseCallbackModule, self).v2_runner_item_on_failed(result)

    def v2_runner_item_on_skipped(self, result):
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
        )
        with self.capture_event_data('runner_item_on_skipped', **event_data):
            super(BaseCallbackModule, self).v2_runner_item_on_skipped(result)

    def v2_runner_retry(self, result):
        event_data = dict(
            host=result._host.get_name(),
            task=result._task,
            res=result._result,
        )
        with self.capture_event_data('runner_retry', **event_data):
            super(BaseCallbackModule, self).v2_runner_retry(result)


class AWXDefaultCallbackModule(BaseCallbackModule, DefaultCallbackModule):

    CALLBACK_NAME = 'awx_display'


class AWXMinimalCallbackModule(BaseCallbackModule, MinimalCallbackModule):

    CALLBACK_NAME = 'minimal'

    def v2_playbook_on_play_start(self, play):
        pass

    def v2_playbook_on_task_start(self, task, is_conditional):
        self.set_task(task)
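The two concrete classes at the bottom rely on Python's MRO: BaseCallbackModule is listed first, so its v2_* hooks run (emitting the AWX event) and each super() call then falls through to the stdout-producing callback. In miniature, with hypothetical class names:

class Base:
    def hook(self):
        print('emit event')   # event side-effect first
        super().hook()        # then fall through along the MRO

class Stdout:
    def hook(self):
        print('write stdout')

class Combined(Base, Stdout):
    pass

Combined().hook()  # emits the event, then writes the normal stdout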
@@ -1,353 +0,0 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved

from __future__ import absolute_import

from collections import OrderedDict
import json
import mock
import os
import shutil
import sys
import tempfile

import pytest

# ansible uses `ANSIBLE_CALLBACK_PLUGINS` and `ANSIBLE_STDOUT_CALLBACK` to
# discover callback plugins; `ANSIBLE_CALLBACK_PLUGINS` is a list of paths to
# search for a plugin implementation (which should be named `CallbackModule`)
#
# this code modifies the Python path to make our
# `awx.lib.awx_display_callback` callback importable (because `awx.lib`
# itself is not a package)
#
# we use the `awx_display_callback` imports below within this file, but
# Ansible also uses them when it discovers this file in
# `ANSIBLE_CALLBACK_PLUGINS`
CALLBACK = os.path.splitext(os.path.basename(__file__))[0]
PLUGINS = os.path.dirname(__file__)
with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
                                  'ANSIBLE_CALLBACK_PLUGINS': PLUGINS}):
    from ansible import __version__ as ANSIBLE_VERSION
    from ansible.cli.playbook import PlaybookCLI
    from ansible.executor.playbook_executor import PlaybookExecutor
    from ansible.inventory.manager import InventoryManager
    from ansible.parsing.dataloader import DataLoader
    from ansible.vars.manager import VariableManager

    # Add awx/lib to sys.path so we can use the plugin
    path = os.path.abspath(os.path.join(PLUGINS, '..', '..', 'lib'))
    if path not in sys.path:
        sys.path.insert(0, path)

    from awx_display_callback import AWXDefaultCallbackModule as CallbackModule  # noqa
    from awx_display_callback.events import event_context  # noqa


@pytest.fixture()
def cache(request):
    class Cache(OrderedDict):
        def set(self, key, value):
            self[key] = value
    local_cache = Cache()
    patch = mock.patch.object(event_context, 'cache', local_cache)
    patch.start()
    request.addfinalizer(patch.stop)
    return local_cache


@pytest.fixture()
def executor(tmpdir_factory, request):
    playbooks = request.node.callspec.params.get('playbook')
    playbook_files = []
    for name, playbook in playbooks.items():
        filename = str(tmpdir_factory.mktemp('data').join(name))
        with open(filename, 'w') as f:
            f.write(playbook)
        playbook_files.append(filename)

    cli = PlaybookCLI(['', 'playbook.yml'])
    cli.parse()
    options = cli.parser.parse_args(['-v'])[0]
    loader = DataLoader()
    variable_manager = VariableManager(loader=loader)
    inventory = InventoryManager(loader=loader, sources='localhost,')
    variable_manager.set_inventory(inventory)

    return PlaybookExecutor(playbooks=playbook_files, inventory=inventory,
                            variable_manager=variable_manager, loader=loader,
                            options=options, passwords={})


@pytest.mark.parametrize('event', {'playbook_on_start',
                                   'playbook_on_play_start',
                                   'playbook_on_task_start', 'runner_on_ok',
                                   'playbook_on_stats'})
@pytest.mark.parametrize('playbook', [
    {'helloworld.yml': '''
- name: Hello World Sample
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - name: Hello Message
      debug:
        msg: "Hello World!"
'''},  # noqa
    {'results_included.yml': '''
- name: Run module which generates results list
  connection: local
  hosts: all
  gather_facts: no
  vars:
    results: ['foo', 'bar']
  tasks:
    - name: Generate results list
      debug:
        var: results
'''},  # noqa
])
def test_callback_plugin_receives_events(executor, cache, event, playbook):
    executor.run()
    assert len(cache)
    assert event in [task['event'] for task in cache.values()]


@pytest.mark.parametrize('playbook', [
    {'no_log_on_ok.yml': '''
- name: args should not be logged when task-level no_log is set
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - shell: echo "SENSITIVE"
      no_log: true
'''},  # noqa
    {'no_log_on_fail.yml': '''
- name: failed args should not be logged when task-level no_log is set
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - shell: echo "SENSITIVE"
      no_log: true
      failed_when: true
      ignore_errors: true
'''},  # noqa
    {'no_log_on_skip.yml': '''
- name: skipped task args should be suppressed with no_log
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - shell: echo "SENSITIVE"
      no_log: true
      when: false
'''},  # noqa
    {'no_log_on_play.yml': '''
- name: args should not be logged when play-level no_log is set
  connection: local
  hosts: all
  gather_facts: no
  no_log: true
  tasks:
    - shell: echo "SENSITIVE"
'''},  # noqa
    {'async_no_log.yml': '''
- name: async task args should be suppressed with no_log
connection: local
|
||||
hosts: all
|
||||
gather_facts: no
|
||||
no_log: true
|
||||
tasks:
|
||||
- async: 10
|
||||
poll: 1
|
||||
shell: echo "SENSITIVE"
|
||||
no_log: true
|
||||
'''}, # noqa
|
||||
{'with_items.yml': '''
|
||||
- name: with_items tasks should be suppressed with no_log
|
||||
connection: local
|
||||
hosts: all
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- shell: echo {{ item }}
|
||||
no_log: true
|
||||
with_items: [ "SENSITIVE", "SENSITIVE-SKIPPED", "SENSITIVE-FAILED" ]
|
||||
when: item != "SENSITIVE-SKIPPED"
|
||||
failed_when: item == "SENSITIVE-FAILED"
|
||||
ignore_errors: yes
|
||||
'''}, # noqa, NOTE: with_items will be deprecated in 2.9
|
||||
{'loop.yml': '''
|
||||
- name: loop tasks should be suppressed with no_log
|
||||
connection: local
|
||||
hosts: all
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- shell: echo {{ item }}
|
||||
no_log: true
|
||||
loop: [ "SENSITIVE", "SENSITIVE-SKIPPED", "SENSITIVE-FAILED" ]
|
||||
when: item != "SENSITIVE-SKIPPED"
|
||||
failed_when: item == "SENSITIVE-FAILED"
|
||||
ignore_errors: yes
|
||||
'''}, # noqa
|
||||
])
|
||||
def test_callback_plugin_no_log_filters(executor, cache, playbook):
|
||||
executor.run()
|
||||
assert len(cache)
|
||||
assert 'SENSITIVE' not in json.dumps(cache.items())
|
||||
|
||||
|
||||
@pytest.mark.parametrize('playbook', [
{'no_log_on_ok.yml': '''
- name: args should not be logged when no_log is set at the task or module level
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - shell: echo "PUBLIC"
    - shell: echo "PRIVATE"
      no_log: true
    - uri: url=https://example.org username="PUBLIC" password="PRIVATE"
    - copy: content="PRIVATE" dest="/tmp/tmp_no_log"
'''}, # noqa
])
def test_callback_plugin_task_args_leak(executor, cache, playbook):
    executor.run()
    events = cache.values()
    assert events[0]['event'] == 'playbook_on_start'
    assert events[1]['event'] == 'playbook_on_play_start'

    # task 1
    assert events[2]['event'] == 'playbook_on_task_start'
    assert events[3]['event'] == 'runner_on_ok'

    # task 2 no_log=True
    assert events[4]['event'] == 'playbook_on_task_start'
    assert events[5]['event'] == 'runner_on_ok'
    assert 'PUBLIC' in json.dumps(cache.items())
    assert 'PRIVATE' not in json.dumps(cache.items())
    # make sure playbook was successful, so all tasks were hit
    assert not events[-1]['event_data']['failures'], 'Unexpected playbook execution failure'


@pytest.mark.parametrize('playbook', [
{'loop_with_no_log.yml': '''
- name: playbook variable should not be overwritten when using no log
  connection: local
  hosts: all
  gather_facts: no
  tasks:
    - command: "{{ item }}"
      register: command_register
      no_log: True
      with_items:
        - "echo helloworld!"
    - debug: msg="{{ command_register.results|map(attribute='stdout')|list }}"
'''}, # noqa
])
def test_callback_plugin_censoring_does_not_overwrite(executor, cache, playbook):
    executor.run()
    events = cache.values()
    assert events[0]['event'] == 'playbook_on_start'
    assert events[1]['event'] == 'playbook_on_play_start'

    # task 1
    assert events[2]['event'] == 'playbook_on_task_start'
    # Ordering of task and item events may differ randomly
    assert set(['runner_on_ok', 'runner_item_on_ok']) == set([data['event'] for data in events[3:5]])

    # task 2 no_log=True
    assert events[5]['event'] == 'playbook_on_task_start'
    assert events[6]['event'] == 'runner_on_ok'
    assert 'helloworld!' in events[6]['event_data']['res']['msg']


@pytest.mark.parametrize('playbook', [
{'strip_env_vars.yml': '''
- name: sensitive environment variables should be stripped from events
  connection: local
  hosts: all
  tasks:
    - shell: echo "Hello, World!"
'''}, # noqa
])
def test_callback_plugin_strips_task_environ_variables(executor, cache, playbook):
    executor.run()
    assert len(cache)
    for event in cache.values():
        assert os.environ['PATH'] not in json.dumps(event)


@pytest.mark.parametrize('playbook', [
{'custom_set_stat.yml': '''
- name: custom set_stats calls should persist to the local disk so awx can save them
  connection: local
  hosts: all
  tasks:
    - set_stats:
        data:
          foo: "bar"
'''}, # noqa
])
def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
    try:
        private_data_dir = tempfile.mkdtemp()
        with mock.patch.dict(os.environ, {'AWX_PRIVATE_DATA_DIR': private_data_dir}):
            executor.run()
            artifacts_path = os.path.join(private_data_dir, 'artifacts', 'custom')
            with open(artifacts_path, 'r') as f:
                assert json.load(f) == {'foo': 'bar'}
    finally:
        shutil.rmtree(os.path.join(private_data_dir))


@pytest.mark.parametrize('playbook', [
{'handle_playbook_on_notify.yml': '''
- name: handle playbook_on_notify events properly
  connection: local
  hosts: all
  handlers:
    - name: my_handler
      debug: msg="My Handler"
  tasks:
    - debug: msg="My Task"
      changed_when: true
      notify:
        - my_handler
'''}, # noqa
])
@pytest.mark.skipif(ANSIBLE_VERSION < '2.5', reason="v2_playbook_on_notify doesn't work before ansible 2.5")
def test_callback_plugin_records_notify_events(executor, cache, playbook):
    executor.run()
    assert len(cache)
    notify_events = [x[1] for x in cache.items() if x[1]['event'] == 'playbook_on_notify']
    assert len(notify_events) == 1
    assert notify_events[0]['event_data']['handler'] == 'my_handler'
    assert notify_events[0]['event_data']['host'] == 'localhost'
    assert notify_events[0]['event_data']['task'] == 'debug'


@pytest.mark.parametrize('playbook', [
{'no_log_module_with_var.yml': '''
- name: ensure that module-level secrets are redacted
  connection: local
  hosts: all
  vars:
    - pw: SENSITIVE
  tasks:
    - uri:
        url: https://example.org
        user: john-jacob-jingleheimer-schmidt
        password: "{{ pw }}"
'''}, # noqa
])
def test_module_level_no_log(executor, cache, playbook):
    # https://github.com/ansible/tower/issues/1101
    # It's possible for `no_log=True` to be defined at the _module_ level,
    # e.g., for the URI module password parameter.
    # This test ensures that we properly redact those.
    executor.run()
    assert len(cache)
    assert 'john-jacob-jingleheimer-schmidt' in json.dumps(cache.items())
    assert 'SENSITIVE' not in json.dumps(cache.items())
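
The pattern these fixtures exercise is the same throughout: run a playbook, capture every emitted callback event, and assert that no serialized event leaks the sensitive string. A minimal, self-contained sketch of that invariant check (editor's illustration, not part of the AWX source; `cache` is assumed to be a dict of event dicts as in the fixtures above):

import json

def assert_no_leak(cache, secret='SENSITIVE'):
    # serialize each captured event and confirm the secret never appears
    for event in cache.values():
        assert secret not in json.dumps(event)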
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -3086,7 +3086,7 @@ msgstr "URL CloudForms"

#: awx/main/models/credential/__init__.py:982
msgid ""
"Enter the URL for the virtual machine that corresponds to your CloudForm "
"Enter the URL for the virtual machine that corresponds to your CloudForms "
"instance. For example, https://cloudforms.example.org"
msgstr ""
"Introduzca la URL para la máquina virtual que corresponda a su instancia "

@@ -3099,7 +3099,7 @@ msgstr "URL CloudForms"

#: awx/main/models/credential/__init__.py:982
msgid ""
"Enter the URL for the virtual machine that corresponds to your CloudForm "
"Enter the URL for the virtual machine that corresponds to your CloudForms "
"instance. For example, https://cloudforms.example.org"
msgstr ""
"Veuillez saisir l’URL de la machine virtuelle qui correspond à votre "

@@ -2858,7 +2858,7 @@ msgstr "CloudForms URL"

#: awx/main/models/credential/__init__.py:982
msgid ""
"Enter the URL for the virtual machine that corresponds to your CloudForm "
"Enter the URL for the virtual machine that corresponds to your CloudForms "
"instance. For example, https://cloudforms.example.org"
msgstr ""
"CloudForms インスタンスに対応する仮想マシンの URL を入力します (例: https://cloudforms.example.org)。"

@@ -3072,7 +3072,7 @@ msgstr "CloudForms-URL"

#: awx/main/models/credential/__init__.py:982
msgid ""
"Enter the URL for the virtual machine that corresponds to your CloudForm "
"Enter the URL for the virtual machine that corresponds to your CloudForms "
"instance. For example, https://cloudforms.example.org"
msgstr ""
"Voer de URL in voor de virtuele machine die overeenkomt met uw CloudForm-"

@@ -5,7 +5,6 @@
import os
import sys
import logging
import six
from functools import reduce

# Django
@@ -29,7 +28,17 @@ from awx.main.utils import (
    to_python_boolean,
    get_licenser,
)
from awx.main.models import * # noqa
from awx.main.models import (
    ActivityStream, AdHocCommand, AdHocCommandEvent, Credential, CredentialType,
    CustomInventoryScript, Group, Host, Instance, InstanceGroup, Inventory,
    InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, JobEvent,
    JobHostSummary, JobLaunchConfig, JobTemplate, Label, Notification,
    NotificationTemplate, Organization, Project, ProjectUpdate,
    ProjectUpdateEvent, Role, Schedule, SystemJob, SystemJobEvent,
    SystemJobTemplate, Team, UnifiedJob, UnifiedJobTemplate, WorkflowJob,
    WorkflowJobNode, WorkflowJobTemplate, WorkflowJobTemplateNode,
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
)
from awx.main.models.mixins import ResourceMixin

from awx.conf.license import LicenseForbids, feature_enabled

@@ -321,6 +330,36 @@ class BaseAccess(object):
        elif "features" not in validation_info:
            raise LicenseForbids(_("Features not found in active license."))

    def check_org_host_limit(self, data, add_host_name=None):
        validation_info = get_licenser().validate()
        if validation_info.get('license_type', 'UNLICENSED') == 'open':
            return

        inventory = get_object_from_data('inventory', Inventory, data)
        if inventory is None:  # In this case a missing inventory error is raised
            return             # further down the line, so just ignore it.

        org = inventory.organization
        if org is None or org.max_hosts == 0:
            return

        active_count = Host.objects.org_active_count(org.id)
        if active_count > org.max_hosts:
            raise PermissionDenied(
                _("You have already reached the maximum number of %s hosts"
                  " allowed for your organization. Contact your System Administrator"
                  " for assistance." % org.max_hosts)
            )

        if add_host_name:
            host_exists = Host.objects.filter(inventory__organization=org.id, name=add_host_name).exists()
            if not host_exists and active_count == org.max_hosts:
                raise PermissionDenied(
                    _("You have already reached the maximum number of %s hosts"
                      " allowed for your organization. Contact your System Administrator"
                      " for assistance." % org.max_hosts)
                )

    def get_user_capabilities(self, obj, method_list=[], parent_obj=None, capabilities_cache={}):
        if obj is None:
            return {}
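
check_org_host_limit enforces a per-organization cap in two steps: reject outright when the active host count already exceeds max_hosts, and reject adding a brand-new host when the count sits exactly at the cap (re-adding an existing host name is allowed). A standalone sketch of that rule, under hypothetical names, for illustration only:

def over_org_host_limit(active_count, max_hosts, adding_new_host=False):
    # max_hosts == 0 means the limit is disabled for the organization
    if max_hosts == 0:
        return False
    # already over the cap: always deny
    if active_count > max_hosts:
        return True
    # exactly at the cap: deny only a genuinely new host name
    return adding_new_host and active_count == max_hosts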
@@ -351,7 +390,7 @@ class BaseAccess(object):
                user_capabilities[display_method] = self.user.is_superuser
                continue
            elif display_method == 'copy' and isinstance(obj, Project) and obj.scm_type == '':
                # Connot copy manual project without errors
                # Cannot copy manual project without errors
                user_capabilities[display_method] = False
                continue
            elif display_method in ['start', 'schedule'] and isinstance(obj, Group):  # TODO: remove in 3.3
@@ -435,12 +474,16 @@ class InstanceAccess(BaseAccess):
                   skip_sub_obj_read_check=False):
        if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
            return self.user.is_superuser
        return super(InstanceAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
        return super(InstanceAccess, self).can_attach(
            obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check
        )

    def can_unattach(self, obj, sub_obj, relationship, data=None):
        if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
            return self.user.is_superuser
        return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)
        return super(InstanceAccess, self).can_unattach(
            obj, sub_obj, relationship, data=data
        )

    def can_add(self, data):
        return False
@@ -524,7 +567,7 @@ class UserAccess(BaseAccess):
        # A user can be changed if they are themselves, or by org admins or
        # superusers. Change permission implies changing only certain fields
        # that a user should be able to edit for themselves.
        if not settings.MANAGE_ORGANIZATION_AUTH:
        if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
            return False
        return bool(self.user == obj or self.can_admin(obj, data))

@@ -577,7 +620,7 @@ class UserAccess(BaseAccess):
        return False

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        if not settings.MANAGE_ORGANIZATION_AUTH:
        if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
            return False

        # Reverse obj and sub_obj, defer to RoleAccess if this is a role assignment.
@@ -587,7 +630,7 @@ class UserAccess(BaseAccess):
        return super(UserAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)

    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
        if not settings.MANAGE_ORGANIZATION_AUTH:
        if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
            return False

        if relationship == 'roles':
@@ -615,7 +658,7 @@ class OAuth2ApplicationAccess(BaseAccess):
        return self.model.objects.filter(organization__in=org_access_qs)

    def can_change(self, obj, data):
        return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj,
        return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj,
                                                            role_field='admin_role', mandatory=True)

    def can_delete(self, obj):
@@ -623,7 +666,7 @@ class OAuth2ApplicationAccess(BaseAccess):

    def can_add(self, data):
        if self.user.is_superuser:
            return True
            return True
        if not data:
            return Organization.accessible_objects(self.user, 'admin_role').exists()
        return self.check_related('organization', Organization, data, role_field='admin_role', mandatory=True)
@@ -637,29 +680,29 @@ class OAuth2TokenAccess(BaseAccess):
    - I am the user of the token.
    I can create an OAuth2 app token when:
    - I have the read permission of the related application.
    I can read, change or delete a personal token when:
    I can read, change or delete a personal token when:
    - I am the user of the token
    - I am the superuser
    I can create an OAuth2 Personal Access Token when:
    - I am a user. But I can only create a PAT for myself.
    - I am a user. But I can only create a PAT for myself.
    '''

    model = OAuth2AccessToken

    select_related = ('user', 'application')

    def filtered_queryset(self):

    def filtered_queryset(self):
        org_access_qs = Organization.objects.filter(
            Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)

    def can_delete(self, obj):
        if (self.user.is_superuser) | (obj.user == self.user):
            return True
        elif not obj.application:
            return False
        return self.user in obj.application.organization.admin_role

    def can_change(self, obj, data):
        return self.can_delete(obj)

@@ -827,6 +870,10 @@ class HostAccess(BaseAccess):

        # Check to see if we have enough licenses
        self.check_license(add_host_name=data.get('name', None))

        # Check the per-org limit
        self.check_org_host_limit(data, add_host_name=data.get('name', None))

        return True

    def can_change(self, obj, data):
@@ -839,6 +886,10 @@ class HostAccess(BaseAccess):
        if data and 'name' in data:
            self.check_license(add_host_name=data['name'])

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory},
                                      add_host_name=data['name'])

        # Checks for admin or change permission on inventory, controls whether
        # the user can edit variable data.
        return obj and self.user in obj.inventory.admin_role
@@ -1157,13 +1208,10 @@ class TeamAccess(BaseAccess):
    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        """Reverse obj and sub_obj, defer to RoleAccess if this is an assignment
        of a resource role to the team."""
        if not settings.MANAGE_ORGANIZATION_AUTH:
            return False
        # MANAGE_ORGANIZATION_AUTH setting checked in RoleAccess
        if isinstance(sub_obj, Role):
            if sub_obj.content_object is None:
                raise PermissionDenied(_("The {} role cannot be assigned to a team").format(sub_obj.name))
            elif isinstance(sub_obj.content_object, User):
                raise PermissionDenied(_("The admin_role for a User cannot be assigned to a team"))

        if isinstance(sub_obj.content_object, ResourceMixin):
            role_access = RoleAccess(self.user)
@@ -1175,9 +1223,7 @@ class TeamAccess(BaseAccess):
                                                     *args, **kwargs)

    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
        if not settings.MANAGE_ORGANIZATION_AUTH:
            return False

        # MANAGE_ORGANIZATION_AUTH setting checked in RoleAccess
        if isinstance(sub_obj, Role):
            if isinstance(sub_obj.content_object, ResourceMixin):
                role_access = RoleAccess(self.user)
@@ -1213,7 +1259,7 @@ class ProjectAccess(BaseAccess):
    @check_superuser
    def can_add(self, data):
        if not data:  # So the browseable API will work
            return Organization.accessible_objects(self.user, 'admin_role').exists()
            return Organization.accessible_objects(self.user, 'project_admin_role').exists()
        return (self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True) and
                self.check_related('credential', Credential, data, role_field='use_role'))

@@ -1281,6 +1327,7 @@ class JobTemplateAccess(BaseAccess):
        'instance_groups',
        'credentials__credential_type',
        Prefetch('labels', queryset=Label.objects.all().order_by('name')),
        Prefetch('last_job', queryset=UnifiedJob.objects.non_polymorphic()),
    )

    def filtered_queryset(self):
@@ -1337,7 +1384,7 @@ class JobTemplateAccess(BaseAccess):
            return self.user in project.use_role
        else:
            return False


    @check_superuser
    def can_copy_related(self, obj):
        '''
@@ -1346,13 +1393,17 @@ class JobTemplateAccess(BaseAccess):
        '''

        # obj.credentials.all() is accessible ONLY when object is saved (has valid id)
        credential_manager = getattr(obj, 'credentials', None) if getattr(obj, 'id', False) else Credentials.objects.none()
        credential_manager = getattr(obj, 'credentials', None) if getattr(obj, 'id', False) else Credential.objects.none()
        return reduce(lambda prev, cred: prev and self.user in cred.use_role, credential_manager.all(), True)

    def can_start(self, obj, validate_license=True):
        # Check license.
        if validate_license:
            self.check_license()

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory})

        if obj.survey_enabled:
            self.check_license(feature='surveys')
        if Instance.objects.active_count() > 1:
@@ -1394,13 +1445,15 @@ class JobTemplateAccess(BaseAccess):
            'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
            'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch',
            'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
            'custom_virtualenv', 'diff_mode',
            'custom_virtualenv', 'diff_mode', 'timeout', 'job_slice_count',

            # These fields are ignored, but it is convenient for QA to allow clients to post them
            'last_job_run', 'created', 'modified',
        ]

        for k, v in data.items():
            if k not in [x.name for x in obj._meta.concrete_fields]:
                continue
            if hasattr(obj, k) and getattr(obj, k) != v:
                if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
                        and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''):  # Equate '' to None in the case of foreign keys
@@ -1509,6 +1562,9 @@ class JobAccess(BaseAccess):
        if validate_license:
            self.check_license()

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory})

        # A super user can relaunch a job
        if self.user.is_superuser:
            return True
@@ -1840,8 +1896,10 @@ class WorkflowJobTemplateAccess(BaseAccess):
        if 'survey_enabled' in data and data['survey_enabled']:
            self.check_license(feature='surveys')

        return self.check_related('organization', Organization, data, role_field='workflow_admin_role',
                                  mandatory=True)
        return (
            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and
            self.check_related('inventory', Inventory, data, role_field='use_role')
        )

    def can_copy(self, obj):
        if self.save_messages:
@@ -1851,7 +1909,6 @@ class WorkflowJobTemplateAccess(BaseAccess):
        qs = obj.workflow_job_template_nodes
        qs = qs.prefetch_related('unified_job_template', 'inventory__use_role', 'credentials__use_role')
        for node in qs.all():
            node_errors = {}
            if node.inventory and self.user not in node.inventory.use_role:
                missing_inventories.append(node.inventory.name)
            for cred in node.credentials.all():
@@ -1860,8 +1917,6 @@ class WorkflowJobTemplateAccess(BaseAccess):
            ujt = node.unified_job_template
            if ujt and not self.user.can_access(UnifiedJobTemplate, 'start', ujt, validate_license=False):
                missing_ujt.append(ujt.name)
            if node_errors:
                wfjt_errors[node.id] = node_errors
        if missing_ujt:
            self.messages['templates_unable_to_copy'] = missing_ujt
        if missing_credentials:
@@ -1876,6 +1931,10 @@ class WorkflowJobTemplateAccess(BaseAccess):
        if validate_license:
            # check basic license, node count
            self.check_license()

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory})

            # if surveys are added to WFJTs, check license here
            if obj.survey_enabled:
                self.check_license(feature='surveys')
@@ -1895,8 +1954,11 @@ class WorkflowJobTemplateAccess(BaseAccess):
        if self.user.is_superuser:
            return True

        return (self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
                self.user in obj.admin_role)
        return (
            self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
            self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj) and
            self.user in obj.admin_role
        )

    def can_delete(self, obj):
        return self.user.is_superuser or self.user in obj.admin_role
@@ -1944,6 +2006,9 @@ class WorkflowJobAccess(BaseAccess):
        if validate_license:
            self.check_license()

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory})

        if self.user.is_superuser:
            return True

@@ -1954,19 +2019,29 @@ class WorkflowJobAccess(BaseAccess):
        if not template:
            return False

        # If job was launched by another user, it could have survey passwords
        if obj.created_by_id != self.user.pk:
            # Obtain prompts used to start original job
            JobLaunchConfig = obj._meta.get_field('launch_config').related_model
            try:
                config = JobLaunchConfig.objects.get(job=obj)
            except JobLaunchConfig.DoesNotExist:
                config = None
        # Obtain prompts used to start original job
        JobLaunchConfig = obj._meta.get_field('launch_config').related_model
        try:
            config = JobLaunchConfig.objects.get(job=obj)
        except JobLaunchConfig.DoesNotExist:
            if self.save_messages:
                self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
            return False

        if config is None or config.prompts_dict():
        # Check if access to prompts to prevent relaunch
        if config.prompts_dict():
            if obj.created_by_id != self.user.pk:
                if self.save_messages:
                    self.messages['detail'] = _('Job was launched with prompts provided by another user.')
                return False
            if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
                if self.save_messages:
                    self.messages['detail'] = _('Job was launched with prompts you lack access to.')
                return False
            if config.has_unprompted(template):
                if self.save_messages:
                    self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
                return False

        # execute permission to WFJT is mandatory for any relaunch
        return (self.user in template.execute_role)
@@ -2010,6 +2085,9 @@ class AdHocCommandAccess(BaseAccess):
        if validate_license:
            self.check_license()

            # Check the per-org limit
            self.check_org_host_limit(data)

        # If a credential is provided, the user should have use access to it.
        if not self.check_related('credential', Credential, data, role_field='use_role'):
            return False
@@ -2419,7 +2497,7 @@ class ActivityStreamAccess(BaseAccess):
    model = ActivityStream
    prefetch_related = ('organization', 'user', 'inventory', 'host', 'group',
                        'inventory_update', 'credential', 'credential_type', 'team',
                        'ad_hoc_command', 'o_auth2_application', 'o_auth2_access_token',
                        'ad_hoc_command', 'o_auth2_application', 'o_auth2_access_token',
                        'notification_template', 'notification', 'label', 'role', 'actor',
                        'schedule', 'custom_inventory_script', 'unified_job_template',
                        'workflow_job_template_node',)
@@ -2552,14 +2630,13 @@ class RoleAccess(BaseAccess):
        # Unsupported for now
        return False

    def can_attach(self, obj, sub_obj, relationship, data,
                   skip_sub_obj_read_check=False):
        return self.can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check)
    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        return self.can_unattach(obj, sub_obj, relationship, *args, **kwargs)

    @check_superuser
    def can_unattach(self, obj, sub_obj, relationship, data=None, skip_sub_obj_read_check=False):
        if isinstance(obj.content_object, Team):
            if not settings.MANAGE_ORGANIZATION_AUTH:
            if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
                return False

        if not skip_sub_obj_read_check and relationship in ['members', 'member_role.parents', 'parents']:
@@ -2578,7 +2655,7 @@ class RoleAccess(BaseAccess):
        if (isinstance(obj.content_object, Organization) and
                obj.role_field in (Organization.member_role.field.parent_role + ['member_role'])):
            if not isinstance(sub_obj, User):
                logger.error(six.text_type('Unexpected attempt to associate {} with organization role.').format(sub_obj))
                logger.error('Unexpected attempt to associate {} with organization role.'.format(sub_obj))
                return False
            if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
                return False

@@ -126,6 +126,17 @@ register(
    category_slug='system',
)

register(
    'CUSTOM_VENV_PATHS',
    field_class=fields.StringListPathField,
    label=_('Custom virtual environment paths'),
    help_text=_('Paths where Tower will look for custom virtual environments '
                '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
    category=_('System'),
    category_slug='system',
    default=[],
)

register(
    'AD_HOC_COMMANDS',
    field_class=fields.StringListField,
@@ -197,6 +208,18 @@ register(
    category_slug='jobs',
)

register(
    'AWX_ISOLATED_VERBOSITY',
    field_class=fields.IntegerField,
    min_value=0,
    max_value=5,
    label=_('Verbosity level for isolated node management tasks'),
    help_text=_('This can be raised to aid in debugging connection issues for isolated task execution'),
    category=_('Jobs'),
    category_slug='jobs',
    default=0
)

register(
    'AWX_ISOLATED_CHECK_INTERVAL',
    field_class=fields.IntegerField,
@@ -283,7 +306,7 @@ register(
    field_class=fields.BooleanField,
    default=True,
    label=_('Enable Role Download'),
    help_text=_('Allows roles to be dynamically downlaoded from a requirements.yml file for SCM projects.'),
    help_text=_('Allows roles to be dynamically downloaded from a requirements.yml file for SCM projects.'),
    category=_('Jobs'),
    category_slug='jobs',
)

@@ -16,7 +16,8 @@ SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
    ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')),
    ('enable', _('Enable')), ('doas', _('Doas')),
    ('enable', _('Enable')), ('doas', _('Doas')), ('ksu', _('Ksu')),
    ('machinectl', _('Machinectl')), ('sesu', _('Sesu')),
]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@@ -24,7 +25,9 @@ STANDARD_INVENTORY_UPDATE_ENV = {
    # Failure to parse inventory should always be fatal
    'ANSIBLE_INVENTORY_UNPARSED_FAILED': 'True',
    # Always use the --export option for ansible-inventory
    'ANSIBLE_INVENTORY_EXPORT': 'True'
    'ANSIBLE_INVENTORY_EXPORT': 'True',
    # Redirecting output to stderr allows JSON parsing to still work with -vvv
    'ANSIBLE_VERBOSE_TO_STDERR': 'True'
}
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL

@@ -4,6 +4,7 @@ import logging
from channels import Group
from channels.auth import channel_session_user_from_http, channel_session_user

from django.utils.encoding import smart_str
from django.http.cookie import parse_cookie
from django.core.serializers.json import DjangoJSONEncoder

@@ -30,7 +31,7 @@ def ws_connect(message):
    # store the valid CSRF token from the cookie so we can compare it later
    # on ws_receive
    cookie_token = parse_cookie(
        headers.get('cookie')
        smart_str(headers.get(b'cookie'))
    ).get('csrftoken')
    if cookie_token:
        message.channel_session[XRF_KEY] = cookie_token

0   awx/main/db/__init__.py   Normal file
0   awx/main/db/profiled_pg/__init__.py   Normal file
155 awx/main/db/profiled_pg/base.py   Normal file
@@ -0,0 +1,155 @@
import os
import pkg_resources
import sqlite3
import sys
import traceback
import uuid

from django.core.cache import cache
from django.core.cache.backends.locmem import LocMemCache
from django.db.backends.postgresql.base import DatabaseWrapper as BaseDatabaseWrapper

from awx.main.utils import memoize

__loc__ = LocMemCache(str(uuid.uuid4()), {})
__all__ = ['DatabaseWrapper']


class RecordedQueryLog(object):

    def __init__(self, log, db, dest='/var/log/tower/profile'):
        self.log = log
        self.db = db
        self.dest = dest
        try:
            self.threshold = cache.get('awx-profile-sql-threshold')
        except Exception:
            # if we can't reach memcached, just assume profiling's off
            self.threshold = None

    def append(self, query):
        ret = self.log.append(query)
        try:
            self.write(query)
        except Exception:
            # not sure what else to do here - we can't really safely
            # *use* our loggers because it'll just generate more DB queries
            # and potentially recurse into this state again
            _, _, tb = sys.exc_info()
            traceback.print_tb(tb)
        return ret

    def write(self, query):
        if self.threshold is None:
            return
        seconds = float(query['time'])

        # if the query is slow enough...
        if seconds >= self.threshold:
            sql = query['sql']
            if sql.startswith('EXPLAIN'):
                return

            # build a printable Python stack
            bt = ' '.join(traceback.format_stack())

            # and re-run the same query w/ EXPLAIN
            explain = ''
            cursor = self.db.cursor()
            cursor.execute('EXPLAIN VERBOSE {}'.format(sql))
            for line in cursor.fetchall():
                explain += line[0] + '\n'

            # write a row of data into a per-PID sqlite database
            if not os.path.isdir(self.dest):
                os.makedirs(self.dest)
            progname = ' '.join(sys.argv)
            for match in ('uwsgi', 'dispatcher', 'callback_receiver', 'runworker'):
                if match in progname:
                    progname = match
                    break
            else:
                progname = os.path.basename(sys.argv[0])
            filepath = os.path.join(
                self.dest,
                '{}.sqlite'.format(progname)
            )
            version = pkg_resources.get_distribution('awx').version
            log = sqlite3.connect(filepath, timeout=3)
            log.execute(
                'CREATE TABLE IF NOT EXISTS queries ('
                ' id INTEGER PRIMARY KEY,'
                ' version TEXT,'
                ' pid INTEGER,'
                ' stamp DATETIME DEFAULT CURRENT_TIMESTAMP,'
                ' argv REAL,'
                ' time REAL,'
                ' sql TEXT,'
                ' explain TEXT,'
                ' bt TEXT'
                ');'
            )
            log.commit()
            log.execute(
                'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) '
                'VALUES (?, ?, ?, ?, ?, ?, ?);',
                (os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt)
            )
            log.commit()

    def __len__(self):
        return len(self.log)

    def __iter__(self):
        return iter(self.log)

    def __getattr__(self, attr):
        return getattr(self.log, attr)


class DatabaseWrapper(BaseDatabaseWrapper):
    """
    This is a special subclass of Django's postgres DB backend which - based on
    the value of a special flag in memcached - captures slow queries and
    writes profile and Python stack metadata to the disk.
    """

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Django's default base wrapper implementation has `queries_log`,
        # which is a `collections.deque` that every query is appended to
        #
        # this line wraps the deque with a proxy that can capture each query
        # and - if it's slow enough - record profiling metadata to the file
        # system for debugging purposes
        self.queries_log = RecordedQueryLog(self.queries_log, self)

    @property
    @memoize(ttl=1, cache=__loc__)
    def force_debug_cursor(self):
        # in Django's base DB implementation, `self.force_debug_cursor` is just
        # a simple boolean, and this value is used to signal to Django that it
        # should record queries into `self.queries_log` as they're executed (this
        # is the same mechanism used by libraries like the django-debug-toolbar)
        #
        # in _this_ implementation, we represent it as a property which will
        # check memcache for a special flag to be set (when the flag is set, it
        # means we should start recording queries because somebody called
        # `awx-manage profile_sql`)
        #
        # it's worth noting that this property is wrapped w/ @memoize because
        # Django references this attribute _constantly_ (in particular, once
        # per executed query); doing a memcached.get() _at most_ once per
        # second is a good enough window to detect when profiling is turned
        # on/off by a system administrator
        try:
            threshold = cache.get('awx-profile-sql-threshold')
        except Exception:
            # if we can't reach memcached, just assume profiling's off
            threshold = None
        self.queries_log.threshold = threshold
        return threshold is not None

    @force_debug_cursor.setter
    def force_debug_cursor(self, v):
        return
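
The @memoize(ttl=1) wrapper above is what keeps force_debug_cursor cheap: Django reads the attribute once per executed query, but the memcached flag is fetched at most once per second. A simplified sketch of such a TTL memoizer, for illustration only (single shared cached value, no per-instance or per-argument keying, unlike the real awx.main.utils.memoize):

import time

def ttl_memoize(ttl):
    def decorator(fn):
        state = {'stamp': 0.0, 'value': None}
        def wrapper(*args, **kwargs):
            # recompute only when the cached value is older than `ttl` seconds
            now = time.time()
            if now - state['stamp'] >= ttl:
                state['value'] = fn(*args, **kwargs)
                state['stamp'] = now
            return state['value']
        return wrapper
    return decorator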
@@ -2,4 +2,4 @@ from django.conf import settings


def get_local_queuename():
    return settings.CLUSTER_HOST_ID.encode('utf-8')
    return settings.CLUSTER_HOST_ID

@@ -1,14 +1,14 @@
import logging
import os
import sys
import random
import sys
import traceback
from uuid import uuid4

import collections
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Full as QueueFull, Empty as QueueEmpty
from queue import Full as QueueFull, Empty as QueueEmpty

from django.conf import settings
from django.db import connection as django_connection, connections
@@ -19,7 +19,10 @@ import psutil
from awx.main.models import UnifiedJob
from awx.main.dispatch import reaper

logger = logging.getLogger('awx.main.dispatch')
if 'run_callback_receiver' in sys.argv:
    logger = logging.getLogger('awx.main.commands.run_callback_receiver')
else:
    logger = logging.getLogger('awx.main.dispatch')


class PoolWorker(object):
@@ -129,7 +132,7 @@ class PoolWorker(object):
        # the task at [0] is the one that's running right now (or is about to
        # be running)
        if len(self.managed_tasks):
            return self.managed_tasks[self.managed_tasks.keys()[0]]
            return self.managed_tasks[list(self.managed_tasks.keys())[0]]

        return None

@@ -180,7 +183,7 @@ class WorkerPool(object):
    class MessagePrinter(awx.main.dispatch.worker.BaseWorker):

        def perform_work(self, body):
            print body
            print(body)

    pool = WorkerPool(min_workers=4)  # spawn four worker processes
    pool.init_workers(MessagePrinter().work_loop)
@@ -253,7 +256,7 @@ class WorkerPool(object):
        return tmpl.render(pool=self, workers=self.workers, meta=self.debug_meta)

    def write(self, preferred_queue, body):
        queue_order = sorted(range(len(self.workers)), cmp=lambda x, y: -1 if x==preferred_queue else 0)
        queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x==preferred_queue else x)
        write_attempt_order = []
        for queue_actual in queue_order:
            try:
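
The write() hunk above is part of the Python 2 to 3 port: sorted(cmp=...) no longer exists in Python 3, so the comparator is rewritten as a key function that ranks the preferred queue ahead of everything else. A quick check of the replacement's behavior, with illustrative values:

preferred_queue = 2
queue_order = sorted(range(4), key=lambda x: -1 if x == preferred_queue else x)
assert queue_order == [2, 0, 1, 3]  # preferred queue first, the rest in index order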
@@ -296,6 +299,9 @@ class AutoscalePool(WorkerPool):
            # 5 workers per GB of total memory
            self.max_workers = (total_memory_gb * 5)

        # max workers can't be less than min_workers
        self.max_workers = max(self.min_workers, self.max_workers)

    @property
    def should_grow(self):
        if len(self.workers) < self.min_workers:
@@ -322,6 +328,11 @@ class AutoscalePool(WorkerPool):
        2. Clean up unnecessary, idle workers.
        3. Check to see if the database says this node is running any tasks
           that aren't actually running. If so, reap them.

        IMPORTANT: this function is one of the few places in the dispatcher
        (aside from setting lookups) where we talk to the database. As such,
        if there's an outage, this method _can_ throw various
        django.db.utils.Error exceptions. Act accordingly.
        """
        orphaned = []
        for w in self.workers[::]:
@@ -362,14 +373,8 @@ class AutoscalePool(WorkerPool):
        running_uuids = []
        for worker in self.workers:
            worker.calculate_managed_tasks()
            running_uuids.extend(worker.managed_tasks.keys())
        try:
            reaper.reap(excluded_uuids=running_uuids)
        except Exception:
            # we _probably_ failed here due to DB connectivity issues, so
            # don't use our logger (it accesses the database for configuration)
            _, _, tb = sys.exc_info()
            traceback.print_tb(tb)
            running_uuids.extend(list(worker.managed_tasks.keys()))
        reaper.reap(excluded_uuids=running_uuids)

    def up(self):
        if self.full:

@@ -45,7 +45,7 @@ class task:

    @task(queue='tower_broadcast', exchange_type='fanout')
    def announce():
        print "Run this everywhere!"
        print("Run this everywhere!")
    """

    def __init__(self, queue=None, exchange_type=None):

@@ -11,6 +11,9 @@ logger = logging.getLogger('awx.main.dispatch')


def reap_job(j, status):
    if UnifiedJob.objects.get(id=j.id).status not in ('running', 'waiting'):
        # just in case, don't reap jobs that aren't running
        return
    j.status = status
    j.start_args = ''  # blank field to remove encrypted passwords
    j.job_explanation += ' '.join((

@@ -4,15 +4,20 @@
import os
import logging
import signal
import sys
from uuid import UUID
from Queue import Empty as QueueEmpty
from queue import Empty as QueueEmpty

from django import db
from kombu import Producer
from kombu.mixins import ConsumerMixin

from awx.main.dispatch.pool import WorkerPool

logger = logging.getLogger('awx.main.dispatch')
if 'run_callback_receiver' in sys.argv:
    logger = logging.getLogger('awx.main.commands.run_callback_receiver')
else:
    logger = logging.getLogger('awx.main.dispatch')


def signame(sig):
@@ -80,7 +85,11 @@ class AWXConsumer(ConsumerMixin):

    def process_task(self, body, message):
        if 'control' in body:
            return self.control(body, message)
            try:
                return self.control(body, message)
            except Exception:
                logger.exception("Exception handling control message:")
                return
        if len(self.pool):
            if "uuid" in body and body['uuid']:
                try:
@@ -103,7 +112,7 @@ class AWXConsumer(ConsumerMixin):

    def stop(self, signum, frame):
        self.should_stop = True  # this makes the kombu mixin stop consuming
        logger.debug('received {}, stopping'.format(signame(signum)))
        logger.warn('received {}, stopping'.format(signame(signum)))
        self.worker.on_stop()
        raise SystemExit()

@@ -128,6 +137,10 @@ class BaseWorker(object):
                logger.error("Exception on worker {}, restarting: ".format(idx) + str(e))
                continue
            try:
                for conn in db.connections.all():
                    # If the database connection has a hiccup during the prior message, close it
                    # so we can establish a new connection
                    conn.close_if_unusable_or_obsolete()
                self.perform_work(body, *args)
            finally:
                if 'uuid' in body:

@@ -1,7 +1,5 @@
import logging
import time
import os
import signal
import traceback

from django.conf import settings
@@ -110,8 +108,7 @@ class CallbackBrokerWorker(BaseWorker):
                break
            except (OperationalError, InterfaceError, InternalError):
                if retries >= self.MAX_RETRIES:
                    logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
                    os.kill(os.getppid(), signal.SIGINT)
                    logger.exception('Worker could not re-establish database connectivity, giving up on event for Job {}'.format(job_identifier))
                    return
                delay = 60 * retries
                logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
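
The callback-worker hunk above changes the failure mode: instead of signalling the parent process to shut down after exhausting retries, the worker now drops the single event and keeps running, retrying beforehand with a linearly growing delay (60 * retries seconds). A compact sketch of that retry shape, with hypothetical names, for illustration only:

import time

def save_event_with_retries(save, max_retries=3):
    for retries in range(1, max_retries + 1):
        try:
            return save()
        except Exception:
            if retries >= max_retries:
                # give up on this one event rather than kill the worker
                return None
            time.sleep(60 * retries)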
@@ -4,8 +4,6 @@ import importlib
import sys
import traceback

import six
from django import db

from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown

@@ -31,11 +29,18 @@ class TaskWorker(BaseWorker):
        awx.main.tasks.delete_inventory
        awx.main.tasks.RunProjectUpdate
        '''
        if not task.startswith('awx.'):
            raise ValueError('{} is not a valid awx task'.format(task))
        module, target = task.rsplit('.', 1)
        module = importlib.import_module(module)
        _call = None
        if hasattr(module, target):
            _call = getattr(module, target, None)
        if not (
            hasattr(_call, 'apply_async') and hasattr(_call, 'delay')
        ):
            raise ValueError('{} is not decorated with @task()'.format(task))

        return _call

    def run_callable(self, body):
@@ -75,19 +80,16 @@ class TaskWorker(BaseWorker):
            'task': u'awx.main.tasks.RunProjectUpdate'
        }
        '''
        for conn in db.connections.all():
            # If the database connection has a hiccup during a task, close it
            # so we can establish a new connection
            conn.close_if_unusable_or_obsolete()
        result = None
        try:
            result = self.run_callable(body)
        except Exception as exc:
            result = exc

            try:
                if getattr(exc, 'is_awx_task_error', False):
                    # Error caused by user / tracked in job output
                    logger.warning(six.text_type("{}").format(exc))
                    logger.warning("{}".format(exc))
                else:
                    task = body['task']
                    args = body.get('args', [])

@@ -1,13 +1,12 @@
# Copyright (c) 2018 Ansible by Red Hat
# All Rights Reserved.

import six


class _AwxTaskError():
    def build_exception(self, task, message=None):
        if message is None:
            message = six.text_type("Execution error running {}").format(task.log_format)
            message = "Execution error running {}".format(task.log_format)
        e = Exception(message)
        e.task = task
        e.is_awx_task_error = True
@@ -15,7 +14,7 @@ class _AwxTaskError():

    def TaskCancel(self, task, rc):
        """Canceled flag caused run_pexpect to kill the job run"""
        message=six.text_type("{} was canceled (rc={})").format(task.log_format, rc)
        message="{} was canceled (rc={})".format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskCancel"
@@ -23,7 +22,7 @@ class _AwxTaskError():

    def TaskError(self, task, rc):
        """Userspace error (non-zero exit code) in run_pexpect subprocess"""
        message = six.text_type("{} encountered an error (rc={}), please see task stdout for details.").format(task.log_format, rc)
        message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskError"

1   awx/main/expect/.gitignore   vendored   Normal file
@@ -0,0 +1 @@
authorized_keys
0   awx/main/expect/authorized_keys   Normal file
@@ -1,6 +1,3 @@
import base64
import codecs
import StringIO
import json
import os
import shutil
@@ -8,13 +5,13 @@ import stat
import tempfile
import time
import logging
from distutils.version import LooseVersion as Version
from io import StringIO

from django.conf import settings

import awx
from awx.main.expect import run
from awx.main.utils import OutputEventFilter, get_system_task_capacity
from awx.main.utils import get_system_task_capacity
from awx.main.queue import CallbackQueueDispatcher

logger = logging.getLogger('awx.isolated.manager')
@@ -23,23 +20,11 @@ playbook_logger = logging.getLogger('awx.isolated.manager.playbooks')

class IsolatedManager(object):

    def __init__(self, args, cwd, env, stdout_handle, ssh_key_path,
                 expect_passwords={}, cancelled_callback=None, job_timeout=0,
                 idle_timeout=None, extra_update_fields=None,
                 pexpect_timeout=5, proot_cmd='bwrap'):
    def __init__(self, env, cancelled_callback=None, job_timeout=0,
                 idle_timeout=None):
        """
        :param args:                a list of `subprocess.call`-style arguments
                                    representing a subprocess e.g.,
                                    ['ansible-playbook', '...']
        :param cwd:                 the directory where the subprocess should run,
                                    generally the directory where playbooks exist
        :param env:                 a dict containing environment variables for the
                                    subprocess, ala `os.environ`
        :param stdout_handle:       a file-like object for capturing stdout
        :param ssh_key_path:        a filepath where SSH key data can be read
        :param expect_passwords:    a dict of regular expression password prompts
                                    to input values, i.e., {r'Password:*?$':
                                    'some_password'}
        :param cancelled_callback:  a callable - which returns `True` or `False`
                                    - signifying if the job has been prematurely
                                    cancelled
@@ -48,26 +33,11 @@ class IsolatedManager(object):
        :param idle_timeout         a timeout (in seconds); if new output is not
                                    sent to stdout in this interval, the process
                                    will be terminated
        :param extra_update_fields: a dict used to specify DB fields which should
                                    be updated on the underlying model
                                    object after execution completes
        :param pexpect_timeout      a timeout (in seconds) to wait on
                                    `pexpect.spawn().expect()` calls
        :param proot_cmd            the command used to isolate processes, `bwrap`
        """
        self.args = args
        self.cwd = cwd
        self.isolated_env = self._redact_isolated_env(env.copy())
        self.management_env = self._base_management_env()
        self.stdout_handle = stdout_handle
        self.ssh_key_path = ssh_key_path
        self.expect_passwords = {k.pattern: v for k, v in expect_passwords.items()}
        self.cancelled_callback = cancelled_callback
        self.job_timeout = job_timeout
        self.idle_timeout = idle_timeout
        self.extra_update_fields = extra_update_fields
        self.pexpect_timeout = pexpect_timeout
        self.proot_cmd = proot_cmd
        self.started_at = None

    @staticmethod
@@ -101,20 +71,10 @@ class IsolatedManager(object):
        ]
        if extra_vars:
            args.extend(['-e', json.dumps(extra_vars)])
        if settings.AWX_ISOLATED_VERBOSITY:
            args.append('-%s' % ('v' * min(5, settings.AWX_ISOLATED_VERBOSITY)))
        return args

    @staticmethod
    def _redact_isolated_env(env):
        '''
        strips some environment variables that aren't applicable to
        job execution within the isolated instance
        '''
        for var in (
                'HOME', 'RABBITMQ_HOST', 'RABBITMQ_PASS', 'RABBITMQ_USER', 'CACHE',
                'DJANGO_PROJECT_DIR', 'DJANGO_SETTINGS_MODULE', 'RABBITMQ_VHOST'):
            env.pop(var, None)
        return env

    @classmethod
    def awx_playbook_path(cls):
        return os.path.abspath(os.path.join(
@@ -125,56 +85,35 @@ class IsolatedManager(object):
    def path_to(self, *args):
        return os.path.join(self.private_data_dir, *args)

    def dispatch(self):
    def dispatch(self, playbook=None, module=None, module_args=None):
        '''
        Compile the playbook, its environment, and metadata into a series
        of files, and ship to a remote host for isolated execution.
        Ship the runner payload to a remote host for isolated execution.
        '''
        self.handled_events = set()
        self.started_at = time.time()
        secrets = {
            'env': self.isolated_env,
            'passwords': self.expect_passwords,
            'ssh_key_data': None,
            'idle_timeout': self.idle_timeout,
            'job_timeout': self.job_timeout,
            'pexpect_timeout': self.pexpect_timeout
        }

        # if an ssh private key fifo exists, read its contents and delete it
        if self.ssh_key_path:
            buff = StringIO.StringIO()
            with open(self.ssh_key_path, 'r') as fifo:
                for line in fifo:
                    buff.write(line)
            secrets['ssh_key_data'] = buff.getvalue()
            os.remove(self.ssh_key_path)

        # write the entire secret payload to a named pipe
        # the run_isolated.yml playbook will use a lookup to read this data
        # into a variable, and will replicate the data into a named pipe on the
        # isolated instance
        secrets_path = os.path.join(self.private_data_dir, 'env')
        run.open_fifo_write(secrets_path, base64.b64encode(json.dumps(secrets)))

        self.build_isolated_job_data()

        extra_vars = {
            'src': self.private_data_dir,
            'dest': settings.AWX_PROOT_BASE_PATH,
            'ident': self.ident
        }
        if self.proot_temp_dir:
            extra_vars['proot_temp_dir'] = self.proot_temp_dir
        if playbook:
            extra_vars['playbook'] = playbook
        if module and module_args:
            extra_vars['module'] = module
            extra_vars['module_args'] = module_args

        # Run ansible-playbook to launch a job on the isolated host. This:
        #
        # - sets up a temporary directory for proot/bwrap (if necessary)
        # - copies encrypted job data from the controlling host to the isolated host (with rsync)
        # - writes the encryption secret to a named pipe on the isolated host
        # - launches the isolated playbook runner via `awx-expect start <job-id>`
        # - launches ansible-runner
        args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars)
        if self.instance.verbosity:
            args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
        buff = StringIO.StringIO()
        buff = StringIO()
        logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
        status, rc = IsolatedManager.run_pexpect(
            args, self.awx_playbook_path(), self.management_env, buff,
@@ -182,10 +121,15 @@ class IsolatedManager(object):
            job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
            pexpect_timeout=5
        )
        output = buff.getvalue().encode('utf-8')
        output = buff.getvalue()
        playbook_logger.info('Isolated job {} dispatch:\n{}'.format(self.instance.id, output))
        if status != 'successful':
            self.stdout_handle.write(output)
            for event_data in [
                {'event': 'verbose', 'stdout': output},
                {'event': 'EOF', 'final_counter': 1},
            ]:
                event_data.setdefault(self.event_data_key, self.instance.id)
                CallbackQueueDispatcher().dispatch(event_data)
        return status, rc

@classmethod
|
||||
@@ -209,11 +153,8 @@ class IsolatedManager(object):
|
||||
|
||||
def build_isolated_job_data(self):
|
||||
'''
|
||||
Write the playbook and metadata into a collection of files on the local
|
||||
file system.
|
||||
|
||||
This function is intended to be used to compile job data so that it
|
||||
can be shipped to a remote, isolated host (via ssh).
|
||||
Write metadata related to the playbook run into a collection of files
|
||||
on the local file system.
|
||||
'''
|
||||
|
||||
rsync_exclude = [
|
||||
@@ -223,42 +164,18 @@ class IsolatedManager(object):
|
||||
'- /project/.hg',
|
||||
# don't rsync job events that are in the process of being written
|
||||
'- /artifacts/job_events/*-partial.json.tmp',
|
||||
# rsync can't copy named pipe data - we're replicating this manually ourselves in the playbook
|
||||
'- /env'
|
||||
# don't rsync the ssh_key FIFO
|
||||
'- /env/ssh_key',
|
||||
]
|
||||
|
||||
for filename, data in (
|
||||
['.rsync-filter', '\n'.join(rsync_exclude)],
|
||||
['args', json.dumps(self.args)]
|
||||
):
|
||||
path = self.path_to(filename)
|
||||
with open(path, 'w') as f:
|
||||
f.write(data)
|
||||
os.chmod(path, stat.S_IRUSR)
|
||||
|
||||
# symlink the scm checkout (if there is one) so that it's rsync'ed over, too
|
||||
if 'AD_HOC_COMMAND_ID' not in self.isolated_env:
|
||||
os.symlink(self.cwd, self.path_to('project'))
|
||||
|
||||
# create directories for build artifacts to live in
|
||||
os.makedirs(self.path_to('artifacts', 'job_events'), mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
|
||||
|
||||
    def _missing_artifacts(self, path_list, output):
        missing_artifacts = filter(lambda path: not os.path.exists(path), path_list)
        for path in missing_artifacts:
            self.stdout_handle.write('ansible did not exit cleanly, missing `{}`.\n'.format(path))
        if missing_artifacts:
            daemon_path = self.path_to('artifacts', 'daemon.log')
            if os.path.exists(daemon_path):
                # If available, show log files from the run.py call
                with codecs.open(daemon_path, 'r', encoding='utf-8') as f:
                    self.stdout_handle.write(f.read())
            else:
                # Provide the management playbook standard out if not available
                self.stdout_handle.write(output)
            return True
        return False

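One py3 pitfall worth flagging in `_missing_artifacts`: in Python 2 `filter()` returns a list, so iterating it and then testing its truthiness both work; in Python 3 `filter()` returns a one-shot iterator, which the for-loop exhausts and which is always truthy as an object. A py3-safe sketch materializes the result first:

# Sketch of a py3-safe equivalent (names illustrative, not AWX's method).
import os

def missing_paths(path_list):
    missing = [p for p in path_list if not os.path.exists(p)]  # list, not iterator
    for p in missing:
        print('missing `{}`'.format(p))
    return bool(missing)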
    def check(self, interval=None):
        """
        Repeatedly poll the isolated node to determine if the job has run.
@@ -282,20 +199,13 @@ class IsolatedManager(object):
        status = 'failed'
        output = ''
        rc = None
        buff = StringIO.StringIO()
        buff = StringIO()
        last_check = time.time()
        seek = 0
        job_timeout = remaining = self.job_timeout
        dispatcher = CallbackQueueDispatcher()
        while status == 'failed':
            if job_timeout != 0:
                remaining = max(0, job_timeout - (time.time() - self.started_at))
                if remaining == 0:
                    # if it takes longer than $REMAINING_JOB_TIMEOUT to retrieve
                    # job artifacts from the host, consider the job failed
                    if isinstance(self.extra_update_fields, dict):
                        self.extra_update_fields['job_explanation'] = "Job terminated due to timeout"
                    status = 'failed'
                    break

            canceled = self.cancelled_callback() if self.cancelled_callback else False
            if not canceled and time.time() - last_check < interval:
@@ -303,7 +213,10 @@ class IsolatedManager(object):
                time.sleep(1)
                continue

            buff = StringIO.StringIO()
            if canceled:
                logger.warning('Isolated job {} was manually cancelled.'.format(self.instance.id))

            buff = StringIO()
            logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
            status, rc = IsolatedManager.run_pexpect(
                args, self.awx_playbook_path(), self.management_env, buff,
@@ -311,36 +224,50 @@ class IsolatedManager(object):
                idle_timeout=remaining,
                job_timeout=remaining,
                pexpect_timeout=5,
                proot_cmd=self.proot_cmd
                proot_cmd='bwrap'
            )
            output = buff.getvalue().encode('utf-8')
            playbook_logger.info('Isolated job {} check:\n{}'.format(self.instance.id, output))

            path = self.path_to('artifacts', 'stdout')
            if os.path.exists(path):
                with open(path, 'r') as f:
                    f.seek(seek)
                    for line in f:
                        self.stdout_handle.write(line)
                        seek += len(line)
            # discover new events and ingest them
            events_path = self.path_to('artifacts', self.ident, 'job_events')

            # it's possible that `events_path` doesn't exist *yet*, because runner
            # hasn't actually written any events yet (if you ran e.g., a sleep 30)
            # only attempt to consume events if any were rsynced back
            if os.path.exists(events_path):
                for event in set(os.listdir(events_path)) - self.handled_events:
                    path = os.path.join(events_path, event)
                    if os.path.exists(path):
                        event_data = json.load(
                            open(os.path.join(events_path, event), 'r')
                        )
                        event_data.setdefault(self.event_data_key, self.instance.id)
                        dispatcher.dispatch(event_data)
                        self.handled_events.add(event)

                        # handle artifacts
                        if event_data.get('event_data', {}).get('artifact_data', {}):
                            self.instance.artifacts = event_data['event_data']['artifact_data']
                            self.instance.save(update_fields=['artifacts'])

            last_check = time.time()

        if status == 'successful':
            status_path = self.path_to('artifacts', 'status')
            rc_path = self.path_to('artifacts', 'rc')
            if self._missing_artifacts([status_path, rc_path], output):
                status = 'failed'
                rc = 1
            else:
                with open(status_path, 'r') as f:
                    status = f.readline()
                with open(rc_path, 'r') as f:
                    rc = int(f.readline())
        elif status == 'failed':
            # if we were unable to retrieve job results from the isolated host,
            # print stdout of the `check_isolated.yml` playbook for clues
            self.stdout_handle.write(output)
            status_path = self.path_to('artifacts', self.ident, 'status')
            rc_path = self.path_to('artifacts', self.ident, 'rc')
            with open(status_path, 'r') as f:
                status = f.readline()
            with open(rc_path, 'r') as f:
                rc = int(f.readline())

        # emit an EOF event
        event_data = {
            'event': 'EOF',
            'final_counter': len(self.handled_events)
        }
        event_data.setdefault(self.event_data_key, self.instance.id)
        dispatcher.dispatch(event_data)

        return status, rc

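The polling loop above ingests runner events idempotently: each rsynced JSON file under `job_events/` is dispatched exactly once, tracked by a `handled_events` set. A condensed, hedged sketch of that pattern (the dispatch callable is a stand-in for AWX's callback queue):

# Minimal sketch of idempotent event ingestion; names are illustrative.
import json
import os

handled_events = set()

def consume_new_events(events_path, dispatch):
    if not os.path.exists(events_path):
        return  # runner may not have written (or rsynced) any events yet
    for event in set(os.listdir(events_path)) - handled_events:
        path = os.path.join(events_path, event)
        with open(path, 'r') as f:
            event_data = json.load(f)
        dispatch(event_data)          # hand off to the callback queue
        handled_events.add(event)     # never dispatch the same file twice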
@@ -350,12 +277,11 @@ class IsolatedManager(object):
            'private_data_dir': self.private_data_dir,
            'cleanup_dirs': [
                self.private_data_dir,
                self.proot_temp_dir,
            ],
        }
        args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
        logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
        buff = StringIO.StringIO()
        buff = StringIO()
        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
        status, rc = IsolatedManager.run_pexpect(
            args, self.awx_playbook_path(), self.management_env, buff,
@@ -371,23 +297,15 @@ class IsolatedManager(object):
    @classmethod
    def update_capacity(cls, instance, task_result, awx_application_version):
        instance.version = task_result['version']
        instance.version = 'ansible-runner-{}'.format(task_result['version'])

        isolated_version = instance.version.split("-", 1)[0]
        cluster_version = awx_application_version.split("-", 1)[0]

        if Version(cluster_version) > Version(isolated_version):
            err_template = "Isolated instance {} reports version {}, cluster node is at {}, setting capacity to zero."
            logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
            instance.capacity = 0
        else:
            if instance.capacity == 0 and task_result['capacity_cpu']:
                logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
            instance.cpu_capacity = int(task_result['capacity_cpu'])
            instance.mem_capacity = int(task_result['capacity_mem'])
            instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
                                                         cpu_capacity=int(task_result['capacity_cpu']),
                                                         mem_capacity=int(task_result['capacity_mem']))
        if instance.capacity == 0 and task_result['capacity_cpu']:
            logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
        instance.cpu_capacity = int(task_result['capacity_cpu'])
        instance.mem_capacity = int(task_result['capacity_mem'])
        instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
                                                     cpu_capacity=int(task_result['capacity_cpu']),
                                                     mem_capacity=int(task_result['capacity_mem']))
        instance.save(update_fields=['cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])

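The capacity update refuses work from isolated nodes running older code than the cluster by comparing dotted version strings. A small, hedged sketch of the same gate using `distutils` LooseVersion (the same helper imported elsewhere in this diff); names are illustrative:

# Sketch: zero out capacity when the remote node lags the cluster version.
from distutils.version import LooseVersion as Version

def effective_capacity(cluster_version, isolated_version, measured_capacity):
    # compare only the leading dotted-numeric portion, e.g. '3.2.0'
    if Version(cluster_version.split("-", 1)[0]) > Version(isolated_version.split("-", 1)[0]):
        return 0  # too old to trust with work
    return measured_capacity

print(effective_capacity('3.2.0', '3.1.5', 42))  # -> 0
print(effective_capacity('3.2.0', '3.2.0', 42))  # -> 42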
    @classmethod
@@ -407,69 +325,55 @@ class IsolatedManager(object):
        args = cls._build_args('heartbeat_isolated.yml', hostname_string)
        args.extend(['--forks', str(len(instance_qs))])
        env = cls._base_management_env()
        env['ANSIBLE_STDOUT_CALLBACK'] = 'json'

        buff = StringIO.StringIO()
        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
        status, rc = IsolatedManager.run_pexpect(
            args, cls.awx_playbook_path(), env, buff,
            idle_timeout=timeout, job_timeout=timeout,
            pexpect_timeout=5
        )
        output = buff.getvalue().encode('utf-8')
        buff.close()

        try:
            result = json.loads(output)
            if not isinstance(result, dict):
                raise TypeError('Expected a dict but received {}.'.format(str(type(result))))
        except (ValueError, AssertionError, TypeError):
            logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
            return
        facts_path = tempfile.mkdtemp()
        env['ANSIBLE_CACHE_PLUGIN'] = 'jsonfile'
        env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = facts_path

        for instance in instance_qs:
            try:
                task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
            except (KeyError, IndexError):
        buff = StringIO()
        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
        status, rc = IsolatedManager.run_pexpect(
            args, cls.awx_playbook_path(), env, buff,
            idle_timeout=timeout, job_timeout=timeout,
            pexpect_timeout=5
        )
        heartbeat_stdout = buff.getvalue().encode('utf-8')
        buff.close()

        for instance in instance_qs:
            output = heartbeat_stdout
            task_result = {}
            if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
                cls.update_capacity(instance, task_result, awx_application_version)
                logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
            elif instance.capacity == 0:
                logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
                    instance.hostname))
            else:
                logger.warning('Could not update status of isolated instance {}, msg={}'.format(
                    instance.hostname, task_result.get('msg', 'unknown failure')
                ))
                if instance.is_lost(isolated=True):
                    instance.capacity = 0
                    instance.save(update_fields=['capacity'])
                    logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
                        instance.hostname, instance.modified))

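The reworked heartbeat stops scraping the playbook's JSON stdout and instead points Ansible's fact cache at a scratch directory (`ANSIBLE_CACHE_PLUGIN=jsonfile`), then reads one JSON document per host afterwards. A hedged sketch of the read side; the hostname list and fact key are illustrative:

# Sketch: read per-host facts written by Ansible's `jsonfile` cache plugin.
import json
import os
import shutil
import tempfile

facts_path = tempfile.mkdtemp()
# ... a management playbook would run here with:
#     ANSIBLE_CACHE_PLUGIN=jsonfile
#     ANSIBLE_CACHE_PLUGIN_CONNECTION=<facts_path>
try:
    for hostname in ['isolated-node-1']:        # illustrative host list
        cache_file = os.path.join(facts_path, hostname)
        if os.path.exists(cache_file):
            with open(cache_file, 'r') as f:
                facts = json.load(f)
            print(facts.get('awx_capacity_cpu'))
finally:
    shutil.rmtree(facts_path)                   # always clean up the scratch dir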
    @staticmethod
    def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
        dispatcher = CallbackQueueDispatcher()

        def job_event_callback(event_data):
            event_data.setdefault(event_data_key, instance.id)
            if 'uuid' in event_data:
                filename = '{}-partial.json'.format(event_data['uuid'])
                partial_filename = os.path.join(private_data_dir, 'artifacts', 'job_events', filename)
                try:
                    with codecs.open(partial_filename, 'r', encoding='utf-8') as f:
                        partial_event_data = json.load(f)
                    event_data.update(partial_event_data)
                except IOError:
                    if event_data.get('event', '') != 'verbose':
                        logger.error('Missing callback data for event type `{}`, uuid {}, job {}.\nevent_data: {}'.format(
                            event_data.get('event', ''), event_data['uuid'], instance.id, event_data))
            dispatcher.dispatch(event_data)
                with open(os.path.join(facts_path, instance.hostname), 'r') as facts_data:
                    output = facts_data.read()
                task_result = json.loads(output)
            except Exception:
                logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
            if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
                task_result = {
                    'capacity_cpu': task_result['awx_capacity_cpu'],
                    'capacity_mem': task_result['awx_capacity_mem'],
                    'version': task_result['awx_capacity_version']
                }
                cls.update_capacity(instance, task_result, awx_application_version)
                logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
            elif instance.capacity == 0:
                logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
                    instance.hostname))
            else:
                logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
                if instance.is_lost(isolated=True):
                    instance.capacity = 0
                    instance.save(update_fields=['capacity'])
                    logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
                        instance.hostname, instance.modified))
        finally:
            if os.path.exists(facts_path):
                shutil.rmtree(facts_path)

        return OutputEventFilter(job_event_callback)

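`get_stdout_handle` (removed in this diff) merged the per-event "partial" JSON written by the display callback with the stdout-derived event before dispatching it. A hedged sketch of just the merge step; the function name is illustrative:

# Sketch: enrich a stdout-derived event with its on-disk partial JSON, if any.
import codecs
import json
import os

def merge_partial(event_data, private_data_dir):
    uuid = event_data.get('uuid')
    if uuid:
        partial = os.path.join(private_data_dir, 'artifacts', 'job_events',
                               '{}-partial.json'.format(uuid))
        try:
            with codecs.open(partial, 'r', encoding='utf-8') as f:
                event_data.update(json.load(f))
        except IOError:
            pass  # verbose lines legitimately have no partial file
    return event_data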
    def run(self, instance, private_data_dir, proot_temp_dir):
    def run(self, instance, private_data_dir, playbook, module, module_args,
            event_data_key, ident=None):
        """
        Run a job on an isolated host.

@@ -477,18 +381,21 @@ class IsolatedManager(object):
        :param private_data_dir: an absolute path on the local file system
                                 where job-specific data should be written
                                 (i.e., `/tmp/ansible_awx_xyz/`)
        :param proot_temp_dir:   a temporary directory which bwrap maps
                                 restricted paths to
        :param playbook:         the playbook to run
        :param module:           the module to run
        :param module_args:      the module args to use
        :param event_data_key:   e.g., job_id, inventory_id, ...

        For a completed job run, this function returns (status, rc),
        representing the status and return code of the isolated
        `ansible-playbook` run.
        """
        self.ident = ident
        self.event_data_key = event_data_key
        self.instance = instance
        self.host = instance.execution_node
        self.private_data_dir = private_data_dir
        self.proot_temp_dir = proot_temp_dir
        status, rc = self.dispatch()
        status, rc = self.dispatch(playbook, module, module_args)
        if status == 'successful':
            status, rc = self.check()
        self.cleanup()

@@ -4,7 +4,6 @@ import argparse
import base64
import codecs
import collections
import StringIO
import logging
import json
import os
@@ -13,12 +12,12 @@ import pipes
import re
import signal
import sys
import thread
import threading
import time
from io import StringIO

import pexpect
import psutil
import six


logger = logging.getLogger('awx.main.utils.expect')
@@ -49,7 +48,10 @@ def open_fifo_write(path, data):
    reads data from the pipe.
    '''
    os.mkfifo(path, 0o600)
    thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
    threading.Thread(
        target=lambda p, d: open(p, 'w').write(d),
        args=(path, data)
    ).start()


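The py2 `thread.start_new_thread` call becomes a `threading.Thread` here. The background thread matters because opening a FIFO for writing blocks until a reader attaches; in AWX the playbook on the remote end is that reader. A small hedged sketch with a local reader for demonstration (POSIX only):

# Sketch: write to a named pipe from a background thread; opening a FIFO
# for writing blocks until a reader opens the other end.
import os
import tempfile
import threading

path = os.path.join(tempfile.mkdtemp(), 'secret_fifo')
os.mkfifo(path, 0o600)                # owner read/write only
threading.Thread(target=lambda: open(path, 'w').write('s3cret')).start()
with open(path, 'r') as f:            # reader side; unblocks the writer thread
    print(f.read())                   # -> s3cret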
def run_pexpect(args, cwd, env, logfile,
@@ -97,14 +99,8 @@ def run_pexpect(args, cwd, env, logfile,
    # enforce usage of an OrderedDict so that the ordering of elements in
    # `keys()` matches `values()`.
    expect_passwords = collections.OrderedDict(expect_passwords)
    password_patterns = expect_passwords.keys()
    password_values = expect_passwords.values()

    # pexpect needs all env vars to be utf-8 encoded strings
    # https://github.com/pexpect/pexpect/issues/512
    for k, v in env.items():
        if isinstance(v, six.text_type):
            env[k] = v.encode('utf-8')
    password_patterns = list(expect_passwords.keys())
    password_values = list(expect_passwords.values())

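# Why the list(...) wrappers above: in Python 3, dict.keys()/values() return
# view objects that cannot be indexed, while pexpect's expect() wants an
# indexable sequence of patterns whose positions map back to the responses.
# A hedged sketch of the pairing (prompt/response values are illustrative):
#
#   import collections
#   expect_passwords = collections.OrderedDict([
#       (r'Password:', 'hunter2'),
#       (r'Vault password:', 'vault-pw'),
#   ])
#   password_patterns = list(expect_passwords.keys())    # indexable in py3
#   password_values = list(expect_passwords.values())
#   i = child.expect(password_patterns)
#   child.sendline(password_values[i])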
    child = pexpect.spawn(
        args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
@@ -195,18 +191,7 @@ def run_isolated_job(private_data_dir, secrets, logfile=sys.stdout):
    job_timeout = secrets.get('job_timeout', 10)
    pexpect_timeout = secrets.get('pexpect_timeout', 5)

    # Use local callback directory
    callback_dir = os.getenv('AWX_LIB_DIRECTORY')
    if callback_dir is None:
        raise RuntimeError('Location for callbacks must be specified '
                           'by environment variable AWX_LIB_DIRECTORY.')
    env['ANSIBLE_CALLBACK_PLUGINS'] = os.path.join(callback_dir, 'isolated_callbacks')
    if 'AD_HOC_COMMAND_ID' in env:
        env['ANSIBLE_STDOUT_CALLBACK'] = 'minimal'
    else:
        env['ANSIBLE_STDOUT_CALLBACK'] = 'awx_display'
    env['AWX_ISOLATED_DATA_DIR'] = private_data_dir
    env['PYTHONPATH'] = env.get('PYTHONPATH', '') + callback_dir + ':'

    venv_path = env.get('VIRTUAL_ENV')
    if venv_path and not os.path.exists(venv_path):
@@ -232,7 +217,8 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):
    instance's cancel_flag.
    '''
    try:
        if proot_cmd in ' '.join(args):
        used_proot = proot_cmd.encode('utf-8') in args
        if used_proot:
            if not psutil:
                os.kill(pid, signal.SIGKILL)
            else:
@@ -253,8 +239,8 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):


def __run__(private_data_dir):
    buff = StringIO.StringIO()
    with open(os.path.join(private_data_dir, 'env'), 'r') as f:
    buff = StringIO()
    with codecs.open(os.path.join(private_data_dir, 'env'), 'r', encoding='utf-8') as f:
        for line in f:
            buff.write(line)


@@ -4,10 +4,8 @@
# Python
import copy
import json
import operator
import re
import six
import urllib
import urllib.parse

from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError, TemplateSyntaxError
@@ -46,7 +44,7 @@ from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS, ENV_BLACKLIST
from awx.main.constants import ENV_BLACKLIST
from awx.main import utils


@@ -80,7 +78,7 @@ class JSONField(upstream_JSONField):

class JSONBField(upstream_JSONBField):
    def get_prep_lookup(self, lookup_type, value):
        if isinstance(value, six.string_types) and value == "null":
        if isinstance(value, str) and value == "null":
            return 'null'
        return super(JSONBField, self).get_prep_lookup(lookup_type, value)

@@ -95,7 +93,7 @@ class JSONBField(upstream_JSONBField):
    def from_db_value(self, value, expression, connection, context):
        # Work around a bug in django-jsonfield
        # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
        if isinstance(value, six.string_types):
        if isinstance(value, str):
            return json.loads(value)
        return value

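The `six.string_types` checks collapse to plain `str` once py2 support is dropped: py2's `basestring` covered both `str` and `unicode`, while py3 has only `str` as its text type. A tiny standalone sketch of the migration:

# py2 (via six):  isinstance(value, six.string_types)  -> matches str and unicode
# py3:            isinstance(value, str)               -> the only text type
import json

def from_db(value):
    if isinstance(value, str):   # DB driver handed us raw JSON text
        return json.loads(value)
    return value                 # already deserialized

print(from_db('{"a": 1}'))  # {'a': 1}
print(from_db({'a': 1}))    # {'a': 1}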
@@ -251,6 +249,9 @@ class ImplicitRoleField(models.ForeignKey):
            if type(field_name) == tuple:
                continue

            if type(field_name) == bytes:
                field_name = field_name.decode('utf-8')

            if field_name.startswith('singleton:'):
                continue

@@ -373,7 +374,7 @@ class SmartFilterField(models.TextField):
        # https://docs.python.org/2/library/stdtypes.html#truth-value-testing
        if not value:
            return None
        value = urllib.unquote(value)
        value = urllib.parse.unquote(value)
        try:
            SmartFilter().query_from_string(value)
        except RuntimeError as e:
@@ -407,11 +408,8 @@ class JSONSchemaField(JSONBField):
            self.schema(model_instance),
            format_checker=self.format_checker
        ).iter_errors(value):
            # strip Python unicode markers from jsonschema validation errors
            error.message = re.sub(r'\bu(\'|")', r'\1', error.message)

            if error.validator == 'pattern' and 'error' in error.schema:
                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
                error.message = error.schema['error'].format(instance=error.instance)
            elif error.validator == 'type':
                expected_type = error.validator_value
                if expected_type == 'object':
@@ -450,7 +448,7 @@ class JSONSchemaField(JSONBField):
    def from_db_value(self, value, expression, connection, context):
        # Work around a bug in django-jsonfield
        # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
        if isinstance(value, six.string_types):
        if isinstance(value, str):
            return json.loads(value)
        return value

@@ -512,12 +510,9 @@ class CredentialInputField(JSONSchemaField):
        properties = {}
        for field in model_instance.credential_type.inputs.get('fields', []):
            field = field.copy()
            if field['type'] == 'become_method':
                field.pop('type')
                field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
            properties[field['id']] = field
            if field.get('choices', []):
                field['enum'] = field['choices'][:]
                field['enum'] = list(field['choices'])[:]
        return {
            'type': 'object',
            'properties': properties,
@@ -547,7 +542,7 @@ class CredentialInputField(JSONSchemaField):
            v != '$encrypted$',
            model_instance.pk
        ]):
            if not isinstance(getattr(model_instance, k), six.string_types):
            if not isinstance(getattr(model_instance, k), str):
                raise django_exceptions.ValidationError(
                    _('secret values must be of type string, not {}').format(type(v).__name__),
                    code='invalid',
@@ -564,7 +559,7 @@ class CredentialInputField(JSONSchemaField):
            format_checker=self.format_checker
        ).iter_errors(decrypted_values):
            if error.validator == 'pattern' and 'error' in error.schema:
                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
                error.message = error.schema['error'].format(instance=error.instance)
            if error.validator == 'dependencies':
                # replace the default error messaging w/ a better i18n string
                # I wish there was a better way to determine the parameters of
@@ -658,7 +653,7 @@ class CredentialTypeInputField(JSONSchemaField):
            'items': {
                'type': 'object',
                'properties': {
                    'type': {'enum': ['string', 'boolean', 'become_method']},
                    'type': {'enum': ['string', 'boolean']},
                    'format': {'enum': ['ssh_private_key']},
                    'choices': {
                        'type': 'array',
@@ -676,6 +671,7 @@ class CredentialTypeInputField(JSONSchemaField):
                    'multiline': {'type': 'boolean'},
                    'secret': {'type': 'boolean'},
                    'ask_at_runtime': {'type': 'boolean'},
                    'default': {},
                },
                'additionalProperties': False,
                'required': ['id', 'label'],
@@ -719,16 +715,13 @@ class CredentialTypeInputField(JSONSchemaField):
                # If no type is specified, default to string
                field['type'] = 'string'

            if field['type'] == 'become_method':
                if not model_instance.managed_by_tower:
            if 'default' in field:
                default = field['default']
                _type = {'string': str, 'boolean': bool}[field['type']]
                if type(default) != _type:
                    raise django_exceptions.ValidationError(
                        _('become_method is a reserved type name'),
                        code='invalid',
                        params={'value': value},
                        _('{} is not a {}').format(default, field['type'])
                    )
                else:
                    field.pop('type')
                    field['choices'] = CHOICES_PRIVILEGE_ESCALATION_METHODS

            for key in ('choices', 'multiline', 'format', 'secret',):
                if key in field and field['type'] != 'string':
@@ -824,14 +817,14 @@ class CredentialTypeInjectorField(JSONSchemaField):
        )

        class ExplodingNamespace:
            def __unicode__(self):
            def __str__(self):
                raise UndefinedError(_('Must define unnamed file injector in order to reference `tower.filename`.'))

        class TowerNamespace:
            def __init__(self):
                self.filename = ExplodingNamespace()

            def __unicode__(self):
            def __str__(self):
                raise UndefinedError(_('Cannot directly reference reserved `tower` namespace container.'))

        valid_namespace['tower'] = TowerNamespace()

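Another recurring migration is `__unicode__` to `__str__`: py2 used `__unicode__` for text conversion, py3 uses `__str__` everywhere. Here the method appears to be used to make a namespace object raise if a Jinja2 template tries to render it directly. A hedged sketch of the trick with generic names:

# Sketch: an object that explodes when stringified, forcing template authors
# to reference a concrete attribute instead of the bare namespace.
class ReservedNamespace:
    def __str__(self):            # py3; the py2 equivalent was __unicode__
        raise RuntimeError('Cannot reference the reserved namespace directly.')

ns = ReservedNamespace()
ns.filename = 'example.pem'       # attribute access is fine
try:
    str(ns)                       # direct rendering is not
except RuntimeError as e:
    print(e)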
@@ -5,7 +5,6 @@
import datetime
import logging

import six

# Django
from django.core.management.base import BaseCommand
@@ -43,7 +42,7 @@ class Command(BaseCommand):
        n_deleted_items = 0
        pks_to_delete = set()
        for asobj in ActivityStream.objects.iterator():
            asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
            asobj_disp = '"%s" id: %s' % (str(asobj), asobj.id)
            if asobj.timestamp >= self.cutoff:
                if self.dry_run:
                    self.logger.info("would skip %s" % asobj_disp)

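These cleanup management commands all share the same shape: stream rows with `.iterator()` to keep memory flat, build a display string with `str()` (formerly `six.text_type()`), and honor a dry-run flag. A hedged sketch of the loop shape with illustrative names:

# Sketch of the cleanup-command loop shape (queryset and fields illustrative).
def cleanup(queryset, cutoff, dry_run, log):
    deleted = skipped = 0
    for obj in queryset.iterator():        # stream rows, don't load them all
        disp = '"%s" id: %s' % (str(obj), obj.id)
        if obj.timestamp >= cutoff:
            log(('would skip %s' if dry_run else 'skipping %s') % disp)
            skipped += 1
            continue
        if not dry_run:
            obj.delete()
        deleted += 1
    return deleted, skipped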
@@ -3,6 +3,7 @@

# Python
import re
import sys
from dateutil.relativedelta import relativedelta

# Django
@@ -129,6 +130,7 @@ class Command(BaseCommand):

    @transaction.atomic
    def handle(self, *args, **options):
        sys.stderr.write("This command has been deprecated and will be removed in a future release.\n")
        if not feature_enabled('system_tracking'):
            raise CommandError("The System Tracking feature is not enabled for your instance")
        cleanup_facts = CleanupFacts()

@@ -5,7 +5,6 @@
import datetime
import logging

import six

# Django
from django.core.management.base import BaseCommand, CommandError
@@ -68,7 +67,7 @@ class Command(BaseCommand):
        jobs = Job.objects.filter(created__lt=self.cutoff)
        for job in jobs.iterator():
            job_display = '"%s" (%d host summaries, %d events)' % \
                (six.text_type(job),
                (str(job),
                 job.job_host_summaries.count(), job.job_events.count())
            if job.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -89,7 +88,7 @@ class Command(BaseCommand):
        ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
        for ad_hoc_command in ad_hoc_commands.iterator():
            ad_hoc_command_display = '"%s" (%d events)' % \
                (six.text_type(ad_hoc_command),
                (str(ad_hoc_command),
                 ad_hoc_command.ad_hoc_command_events.count())
            if ad_hoc_command.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -109,7 +108,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
        for pu in project_updates.iterator():
            pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
            pu_display = '"%s" (type %s)' % (str(pu), str(pu.launch_type))
            if pu.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -132,7 +131,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
        for iu in inventory_updates.iterator():
            iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
            iu_display = '"%s" (source %s)' % (str(iu), str(iu.source))
            if iu.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -155,7 +154,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
        for sj in system_jobs.iterator():
            sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
            sj_display = '"%s" (type %s)' % (str(sj), str(sj.job_type))
            if sj.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -185,7 +184,7 @@ class Command(BaseCommand):
        workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
        for workflow_job in workflow_jobs.iterator():
            workflow_job_display = '"{}" ({} nodes)'.format(
                six.text_type(workflow_job),
                str(workflow_job),
                workflow_job.workflow_nodes.count())
            if workflow_job.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -206,7 +205,7 @@ class Command(BaseCommand):
        notifications = Notification.objects.filter(created__lt=self.cutoff)
        for notification in notifications.iterator():
            notification_display = '"{}" (started {}, {} type, {} sent)'.format(
                six.text_type(notification), six.text_type(notification.created),
                str(notification), str(notification.created),
                notification.notification_type, notification.notifications_sent)
            if notification.status in ('pending',):
                action_text = 'would skip' if self.dry_run else 'skipping'

@@ -2,7 +2,6 @@
# All Rights Reserved

import subprocess
import warnings

from django.db import transaction
from django.core.management.base import BaseCommand, CommandError
@@ -24,17 +23,10 @@ class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('--hostname', dest='hostname', type=str,
                            help='Hostname used during provisioning')
        parser.add_argument('--name', dest='name', type=str,
                            help='(PENDING DEPRECATION) Hostname used during provisioning')

    @transaction.atomic
    def handle(self, *args, **options):
        # TODO: remove in 3.3
        if options.get('name'):
            warnings.warn("`--name` is deprecated in favor of `--hostname`, and will be removed in release 3.3.")
            if options.get('hostname'):
                raise CommandError("Cannot accept both --name and --hostname.")
            options['hostname'] = options['name']
        hostname = options.get('hostname')
        if not hostname:
            raise CommandError("--hostname is a required argument")

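The deprecation shim above keeps the old `--name` flag working while steering users to `--hostname`, and rejects the ambiguous case where both are given. A hedged, standalone sketch of the pattern using plain argparse:

# Sketch: accept a deprecated alias flag alongside its replacement.
import argparse
import warnings

parser = argparse.ArgumentParser()
parser.add_argument('--hostname', help='Hostname used during provisioning')
parser.add_argument('--name', help='(PENDING DEPRECATION) alias for --hostname')
opts = parser.parse_args(['--name', 'node1'])

if opts.name:
    warnings.warn('`--name` is deprecated in favor of `--hostname`.')
    if opts.hostname:
        parser.error('Cannot accept both --name and --hostname.')
    opts.hostname = opts.name
print(opts.hostname)  # node1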
@@ -1,17 +0,0 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved

# Borrow from another AWX command
from awx.main.management.commands.deprovision_instance import Command as OtherCommand

# Python
import warnings


class Command(OtherCommand):

    def handle(self, *args, **options):
        # TODO: delete this entire file in 3.3
        warnings.warn('This command is replaced with `deprovision_instance` and will '
                      'be removed in release 3.3.')
        return super(Command, self).handle(*args, **options)
@@ -1,6 +1,7 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import datetime
from django.utils.encoding import smart_str

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -35,10 +36,10 @@ class Command(BaseCommand):
        ).save()
        pemfile = Setting.objects.create(
            key='AWX_ISOLATED_PUBLIC_KEY',
            value=key.public_key().public_bytes(
            value=smart_str(key.public_key().public_bytes(
                encoding=serialization.Encoding.OpenSSH,
                format=serialization.PublicFormat.OpenSSH
            ) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
            )) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
        )
        pemfile.save()
        print(pemfile.value)

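The `smart_str(...)` wrapper is needed because `public_bytes()` returns `bytes`, and concatenating bytes with a `str` suffix raises a TypeError on Python 3. A hedged sketch of the same serialization using plain `decode()` in place of Django's `smart_str`:

# Sketch: serialize an OpenSSH public key to text before appending a comment.
import datetime

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
pub = key.public_key().public_bytes(
    encoding=serialization.Encoding.OpenSSH,
    format=serialization.PublicFormat.OpenSSH,
)  # -> bytes in py3
line = pub.decode('utf-8') + ' generated-by-awx@%s' % datetime.datetime.utcnow().isoformat()
print(line)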
@@ -4,6 +4,7 @@
# Python
import json
import logging
import fnmatch
import os
import re
import subprocess
@@ -11,16 +12,25 @@ import sys
import time
import traceback
import shutil
from distutils.version import LooseVersion as Version

# Django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, transaction
from django.utils.encoding import smart_text

# AWX
from awx.main.models import * # noqa
# AWX inventory imports
from awx.main.models.inventory import (
    Inventory,
    InventorySource,
    InventoryUpdate,
    Host
)
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data

# other AWX imports
from awx.main.models.rbac import batch_role_ancestor_rebuilding
from awx.main.utils import (
    ignore_inventory_computed_fields,
    check_proot_installed,
@@ -28,7 +38,7 @@ from awx.main.utils import (
    build_proot_temp_dir,
    get_licenser
)
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
from awx.main.utils.common import _get_ansible_version
from awx.main.signals import disable_activity_stream
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV

@@ -63,60 +73,71 @@ class AnsibleInventoryLoader(object):
    use the ansible-inventory CLI utility to convert it into in-memory
    representational objects. Example:
        /usr/bin/ansible/ansible-inventory -i hosts --list
    If it fails to find this, it uses the backported script instead
    '''

    def __init__(self, source, group_filter_re=None, host_filter_re=None, is_custom=False):
    def __init__(self, source, is_custom=False, venv_path=None):
        self.source = source
        self.source_dir = functioning_dir(self.source)
        self.is_custom = is_custom
        self.tmp_private_dir = None
        self.method = 'ansible-inventory'
        self.group_filter_re = group_filter_re
        self.host_filter_re = host_filter_re

        self.is_vendored_source = False
        if self.source_dir == os.path.join(settings.BASE_DIR, 'plugins', 'inventory'):
            self.is_vendored_source = True
        if venv_path:
            self.venv_path = venv_path
        else:
            self.venv_path = settings.ANSIBLE_VENV_PATH

    def build_env(self):
        env = dict(os.environ.items())
        env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
        env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
        env['VIRTUAL_ENV'] = self.venv_path
        env['PATH'] = os.path.join(self.venv_path, "bin") + ":" + env['PATH']
        # Set configuration items that should always be used for updates
        for key, value in STANDARD_INVENTORY_UPDATE_ENV.items():
            if key not in env:
                env[key] = value
        venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
        venv_libdir = os.path.join(self.venv_path, "lib")
        env.pop('PYTHONPATH', None)  # default to none if no python_ver matches
        if os.path.isdir(os.path.join(venv_libdir, "python2.7")):
            env['PYTHONPATH'] = os.path.join(venv_libdir, "python2.7", "site-packages") + ":"
        for version in os.listdir(venv_libdir):
            if fnmatch.fnmatch(version, 'python[23].*'):
                if os.path.isdir(os.path.join(venv_libdir, version)):
                    env['PYTHONPATH'] = os.path.join(venv_libdir, version, "site-packages") + ":"
                    break
        # For internal inventory updates, these are not reported in the job_env API
        logger.info('Using VIRTUAL_ENV: {}'.format(env['VIRTUAL_ENV']))
        logger.info('Using PATH: {}'.format(env['PATH']))
        logger.info('Using PYTHONPATH: {}'.format(env.get('PYTHONPATH', None)))
        return env

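The hard-coded `python2.7` site-packages lookup becomes an `fnmatch` scan so the loader finds whichever `python2.x`/`python3.x` directory the virtualenv actually contains. A hedged standalone sketch of that discovery:

# Sketch: locate a virtualenv's site-packages without hard-coding the version.
import fnmatch
import os

def find_site_packages(venv_path):
    libdir = os.path.join(venv_path, 'lib')
    for entry in sorted(os.listdir(libdir)):
        # matches e.g. python2.7, python3.6, python3.11
        if fnmatch.fnmatch(entry, 'python[23].*') and os.path.isdir(os.path.join(libdir, entry)):
            return os.path.join(libdir, entry, 'site-packages')
    return None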
    def get_path_to_ansible_inventory(self):
        venv_exe = os.path.join(self.venv_path, 'bin', 'ansible-inventory')
        if os.path.exists(venv_exe):
            return venv_exe
        elif os.path.exists(
            os.path.join(self.venv_path, 'bin', 'ansible')
        ):
            # if bin/ansible exists but bin/ansible-inventory doesn't, it's
            # probably a really old version of ansible that doesn't support
            # ansible-inventory
            raise RuntimeError(
                "{} does not exist (please upgrade to ansible >= 2.4)".format(
                    venv_exe
                )
            )
        return shutil.which('ansible-inventory')

    def get_base_args(self):
        # get ansible-inventory absolute path for running in bubblewrap/proot, in Popen
        for path in os.environ["PATH"].split(os.pathsep):
            potential_path = os.path.join(path.strip('"'), 'ansible-inventory')
            if os.path.isfile(potential_path) and os.access(potential_path, os.X_OK):
                logger.debug('Using system install of ansible-inventory CLI: {}'.format(potential_path))
                return [potential_path, '-i', self.source]

        # Stopgap solution for group_vars, do not use backported module for official
        # vendored cloud modules or custom scripts TODO: remove after Ansible 2.3 deprecation
        if self.is_vendored_source or self.is_custom:
            self.method = 'inventory script invocation'
            return [self.source]

        # ansible-inventory was not found, look for backported module TODO: remove after Ansible 2.3 deprecation
        abs_module_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), '..', '..', '..', 'plugins',
            'ansible_inventory', 'backport.py'))
        self.method = 'ansible-inventory backport'

        if not os.path.exists(abs_module_path):
            raise ImproperlyConfigured('Cannot find inventory module')
        logger.debug('Using backported ansible-inventory module: {}'.format(abs_module_path))
        return [abs_module_path, '-i', self.source]
        ansible_inventory_path = self.get_path_to_ansible_inventory()
        # NOTE: why do we add "python" to the start of these args?
        # the script that runs ansible-inventory specifies a python interpreter
        # that makes no sense in light of the fact that we put all the dependencies
        # inside of /venv/ansible, so we override the specified interpreter
        # https://github.com/ansible/ansible/issues/50714
        bargs = ['python', ansible_inventory_path, '-i', self.source]
        ansible_version = _get_ansible_version(ansible_inventory_path[:-len('-inventory')])
        if ansible_version != 'unknown' and Version(ansible_version) >= Version('2.5'):
            bargs.extend(['--playbook-dir', self.source_dir])
        logger.debug('Using base command: {}'.format(' '.join(bargs)))
        return bargs

    def get_proot_args(self, cmd, env):
        cwd = os.getcwd()
@@ -142,6 +163,9 @@ class AnsibleInventoryLoader(object):
        kwargs['proot_show_paths'] = [functioning_dir(self.source)]
        logger.debug("Running from `{}` working directory.".format(cwd))

        if self.venv_path != settings.ANSIBLE_VENV_PATH:
            kwargs['proot_custom_virtualenv'] = self.venv_path

        return wrap_args_with_proot(cmd, cwd, **kwargs)

    def command_to_json(self, cmd):
@@ -155,6 +179,8 @@ class AnsibleInventoryLoader(object):

        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        stdout, stderr = proc.communicate()
        stdout = smart_text(stdout)
        stderr = smart_text(stderr)

        if self.tmp_private_dir:
            shutil.rmtree(self.tmp_private_dir, True)
@@ -177,80 +203,7 @@ class AnsibleInventoryLoader(object):
        base_args = self.get_base_args()
        logger.info('Reading Ansible inventory source: %s', self.source)

        data = self.command_to_json(base_args + ['--list'])

        # TODO: remove after we run custom scripts through ansible-inventory
        if self.is_custom and '_meta' not in data or 'hostvars' not in data['_meta']:
            # Invoke the executable once for each host name we've built up
            # to set their variables
            data.setdefault('_meta', {})
            data['_meta'].setdefault('hostvars', {})
            logger.warning('Re-calling script for hostvars individually.')
            for group_name, group_data in data.iteritems():
                if group_name == '_meta':
                    continue

                if isinstance(group_data, dict):
                    group_host_list = group_data.get('hosts', [])
                elif isinstance(group_data, list):
                    group_host_list = group_data
                else:
                    logger.warning('Group data for "%s" is not a dict or list',
                                   group_name)
                    group_host_list = []

                for hostname in group_host_list:
                    logger.debug('Obtaining hostvars for %s' % hostname.encode('utf-8'))
                    hostdata = self.command_to_json(
                        base_args + ['--host', hostname.encode("utf-8")]
                    )
                    if isinstance(hostdata, dict):
                        data['_meta']['hostvars'][hostname] = hostdata
                    else:
                        logger.warning(
                            'Expected dict of vars for host "%s" when '
                            'calling with `--host`, got %s instead',
                            k, str(type(data))
                        )

        logger.info('Processing JSON output...')
        inventory = MemInventory(
            group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
        inventory = dict_to_mem_data(data, inventory=inventory)

        return inventory


def load_inventory_source(source, group_filter_re=None,
                          host_filter_re=None, exclude_empty_groups=False,
                          is_custom=False):
    '''
    Load inventory from given source directory or file.
    '''
    # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
    # good naming conventions
    source = source.replace('rhv.py', 'ovirt4.py')
    source = source.replace('satellite6.py', 'foreman.py')
    source = source.replace('vmware.py', 'vmware_inventory.py')
    if not os.path.exists(source):
        raise IOError('Source does not exist: %s' % source)
    source = os.path.join(os.getcwd(), os.path.dirname(source),
                          os.path.basename(source))
    source = os.path.normpath(os.path.abspath(source))

    inventory = AnsibleInventoryLoader(
        source=source,
        group_filter_re=group_filter_re,
        host_filter_re=host_filter_re,
        is_custom=is_custom).load()

    logger.debug('Finished loading from source: %s', source)
    # Exclude groups that are completely empty.
    if exclude_empty_groups:
        inventory.delete_empty_groups()
    logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
                len(inventory.all_group.all_hosts))
    return inventory.all_group
        return self.command_to_json(base_args + ['--list'])


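`proc.communicate()` hands back `bytes` on Python 3, so the new `smart_text()` calls (Django's bytes-to-str coercion) are what keep the JSON parsing downstream working. A hedged sketch of the same idea using plain `decode()`; the command is a stand-in for `ansible-inventory --list`:

# Sketch: run an inventory command and decode its output before json.loads().
import json
import subprocess

proc = subprocess.Popen(
    ['echo', '{"all": {"hosts": []}}'],     # stand-in for ansible-inventory --list
    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
stdout, stderr = proc.communicate()          # bytes in py3
data = json.loads(stdout.decode('utf-8'))    # smart_text() plays this role in the diff
print(data)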
class Command(BaseCommand):
@@ -268,6 +221,8 @@ class Command(BaseCommand):
        parser.add_argument('--inventory-id', dest='inventory_id', type=int,
                            default=None, metavar='i',
                            help='id of inventory to sync')
        parser.add_argument('--venv', dest='venv', type=str, default=None,
                            help='absolute path to the AWX custom virtualenv to use')
        parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False,
                            help='overwrite the destination hosts and groups')
        parser.add_argument('--overwrite-vars', dest='overwrite_vars',
@@ -347,7 +302,7 @@ class Command(BaseCommand):
        if enabled is not default:
            enabled_value = getattr(self, 'enabled_value', None)
            if enabled_value is not None:
                enabled = bool(unicode(enabled_value) == unicode(enabled))
                enabled = bool(str(enabled_value) == str(enabled))
            else:
                enabled = bool(enabled)
        if enabled is default:
@@ -357,6 +312,14 @@ class Command(BaseCommand):
        else:
            raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))

    def get_source_absolute_path(self, source):
        if not os.path.exists(source):
            raise IOError('Source does not exist: %s' % source)
        source = os.path.join(os.getcwd(), os.path.dirname(source),
                              os.path.basename(source))
        source = os.path.normpath(os.path.abspath(source))
        return source

    def load_inventory_from_database(self):
        '''
        Load inventory and related objects from the database.
@@ -369,9 +332,9 @@ class Command(BaseCommand):
        try:
            self.inventory = Inventory.objects.get(**q)
        except Inventory.DoesNotExist:
            raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
            raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
        except Inventory.MultipleObjectsReturned:
            raise CommandError('Inventory with %s = %s returned multiple results' % q.items()[0])
            raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
        logger.info('Updating inventory %d: %s' % (self.inventory.pk,
                                                   self.inventory.name))

@@ -456,6 +419,16 @@ class Command(BaseCommand):
            mem_host.instance_id = instance_id
            self.mem_instance_id_map[instance_id] = mem_host.name

    def _existing_host_pks(self):
        '''Returns cached set of existing / previous host primary key values
        this is the starting set, meaning that it is pre-modification
        by deletions and other things done in the course of this import
        '''
        if not hasattr(self, '_cached_host_pk_set'):
            self._cached_host_pk_set = frozenset(
                self.inventory_source.hosts.values_list('pk', flat=True))
        return self._cached_host_pk_set

    def _delete_hosts(self):
        '''
        For each host in the database that is NOT in the local list, delete
@@ -467,11 +440,11 @@ class Command(BaseCommand):
            queries_before = len(connection.queries)
        hosts_qs = self.inventory_source.hosts
        # Build list of all host pks, remove all that should not be deleted.
        del_host_pks = set(hosts_qs.values_list('pk', flat=True))
        del_host_pks = set(self._existing_host_pks())  # makes mutable copy
        if self.instance_id_var:
            all_instance_ids = self.mem_instance_id_map.keys()
            all_instance_ids = list(self.mem_instance_id_map.keys())
            instance_ids = []
            for offset in xrange(0, len(all_instance_ids), self._batch_size):
            for offset in range(0, len(all_instance_ids), self._batch_size):
                instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
                for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
                    del_host_pks.discard(host_pk)
@@ -479,14 +452,14 @@ class Command(BaseCommand):
                    del_host_pks.discard(host_pk)
            all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
        else:
            all_host_names = self.all_group.all_hosts.keys()
            for offset in xrange(0, len(all_host_names), self._batch_size):
            all_host_names = list(self.all_group.all_hosts.keys())
        for offset in range(0, len(all_host_names), self._batch_size):
            host_names = all_host_names[offset:(offset + self._batch_size)]
            for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
                del_host_pks.discard(host_pk)
        # Now delete all remaining hosts in batches.
        all_del_pks = sorted(list(del_host_pks))
        for offset in xrange(0, len(all_del_pks), self._batch_size):
        for offset in range(0, len(all_del_pks), self._batch_size):
            del_pks = all_del_pks[offset:(offset + self._batch_size)]
            for host in hosts_qs.filter(pk__in=del_pks):
                host_name = host.name
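All of these loops use the same batching idiom (py2's `xrange` simply becoming `range`): slice a sorted list of primary keys into fixed-size windows so each `filter(pk__in=...)` query stays bounded. A hedged sketch with a plain list standing in for the queryset:

# Sketch: process a large id list in fixed-size batches (the xrange -> range
# migration leaves the idiom unchanged; py3 range is already lazy).
BATCH_SIZE = 500

def batches(items, size=BATCH_SIZE):
    for offset in range(0, len(items), size):
        yield items[offset:offset + size]

all_del_pks = sorted({3, 1, 4, 1, 5, 9, 2, 6})
for del_pks in batches(all_del_pks, size=3):
    # a real implementation would run: qs.filter(pk__in=del_pks).delete()
    print('deleting batch:', del_pks)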
@@ -509,8 +482,8 @@ class Command(BaseCommand):
        groups_qs = self.inventory_source.groups.all()
        # Build list of all group pks, remove those that should not be deleted.
        del_group_pks = set(groups_qs.values_list('pk', flat=True))
        all_group_names = self.all_group.all_groups.keys()
        for offset in xrange(0, len(all_group_names), self._batch_size):
        all_group_names = list(self.all_group.all_groups.keys())
        for offset in range(0, len(all_group_names), self._batch_size):
            group_names = all_group_names[offset:(offset + self._batch_size)]
            for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
                del_group_pks.discard(group_pk)
@@ -522,7 +495,7 @@ class Command(BaseCommand):
            del_group_pks.discard(self.inventory_source.deprecated_group_id)
        # Now delete all remaining groups in batches.
        all_del_pks = sorted(list(del_group_pks))
        for offset in xrange(0, len(all_del_pks), self._batch_size):
        for offset in range(0, len(all_del_pks), self._batch_size):
            del_pks = all_del_pks[offset:(offset + self._batch_size)]
            for group in groups_qs.filter(pk__in=del_pks):
                group_name = group.name
@@ -547,6 +520,10 @@ class Command(BaseCommand):
        group_group_count = 0
        group_host_count = 0
        db_groups = self.inventory_source.groups
        # Set of all group names managed by this inventory source
        all_source_group_names = frozenset(self.all_group.all_groups.keys())
        # Set of all host pks managed by this inventory source
        all_source_host_pks = self._existing_host_pks()
        for db_group in db_groups.all():
            if self.inventory_source.deprecated_group_id == db_group.id:  # TODO: remove in 3.3
                logger.debug(
@@ -557,11 +534,20 @@ class Command(BaseCommand):
            # Delete child group relationships not present in imported data.
            db_children = db_group.children
            db_children_name_pk_map = dict(db_children.values_list('name', 'pk'))
            # Exclude child groups from removal list if they were returned by
            # the import, because this parent-child relationship has not changed
            mem_children = self.all_group.all_groups[db_group.name].children
            for mem_group in mem_children:
                db_children_name_pk_map.pop(mem_group.name, None)
            # Exclude child groups from removal list if they were not imported
            # by this specific inventory source, because
            # those relationships are outside of the dominion of this inventory source
            other_source_group_names = set(db_children_name_pk_map.keys()) - all_source_group_names
            for group_name in other_source_group_names:
                db_children_name_pk_map.pop(group_name, None)
            # Removal list is complete - now perform the removals
            del_child_group_pks = list(set(db_children_name_pk_map.values()))
            for offset in xrange(0, len(del_child_group_pks), self._batch_size):
            for offset in range(0, len(del_child_group_pks), self._batch_size):
                child_group_pks = del_child_group_pks[offset:(offset + self._batch_size)]
                for db_child in db_children.filter(pk__in=child_group_pks):
                    group_group_count += 1
@@ -572,22 +558,29 @@ class Command(BaseCommand):
            # Delete group/host relationships not present in imported data.
            db_hosts = db_group.hosts
            del_host_pks = set(db_hosts.values_list('pk', flat=True))
            # Exclude child hosts from removal list if they were not imported
            # by this specific inventory source, because
            # those relationships are outside of the dominion of this inventory source
            del_host_pks = del_host_pks & all_source_host_pks
            # Exclude child hosts from removal list if they were returned by
            # the import, because this group-host relationship has not changed
            mem_hosts = self.all_group.all_groups[db_group.name].hosts
            all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
            for offset in xrange(0, len(all_mem_host_names), self._batch_size):
            for offset in range(0, len(all_mem_host_names), self._batch_size):
                mem_host_names = all_mem_host_names[offset:(offset + self._batch_size)]
                for db_host_pk in db_hosts.filter(name__in=mem_host_names).values_list('pk', flat=True):
                    del_host_pks.discard(db_host_pk)
            all_mem_instance_ids = [h.instance_id for h in mem_hosts if h.instance_id]
            for offset in xrange(0, len(all_mem_instance_ids), self._batch_size):
            for offset in range(0, len(all_mem_instance_ids), self._batch_size):
                mem_instance_ids = all_mem_instance_ids[offset:(offset + self._batch_size)]
                for db_host_pk in db_hosts.filter(instance_id__in=mem_instance_ids).values_list('pk', flat=True):
                    del_host_pks.discard(db_host_pk)
            all_db_host_pks = [v for k,v in self.db_instance_id_map.items() if k in all_mem_instance_ids]
            for db_host_pk in all_db_host_pks:
                del_host_pks.discard(db_host_pk)
            # Removal list is complete - now perform the removals
            del_host_pks = list(del_host_pks)
            for offset in xrange(0, len(del_host_pks), self._batch_size):
            for offset in range(0, len(del_host_pks), self._batch_size):
                del_pks = del_host_pks[offset:(offset + self._batch_size)]
                for db_host in db_hosts.filter(pk__in=del_pks):
                    group_host_count += 1
@@ -635,7 +628,7 @@ class Command(BaseCommand):
            if len(v.parents) == 1 and v.parents[0].name == 'all':
                root_group_names.add(k)
        existing_group_names = set()
        for offset in xrange(0, len(all_group_names), self._batch_size):
        for offset in range(0, len(all_group_names), self._batch_size):
            group_names = all_group_names[offset:(offset + self._batch_size)]
            for group in self.inventory.groups.filter(name__in=group_names):
                mem_group = self.all_group.all_groups[group.name]
@@ -739,7 +732,7 @@ class Command(BaseCommand):
        mem_host_instance_id_map = {}
        mem_host_name_map = {}
        mem_host_names_to_update = set(self.all_group.all_hosts.keys())
        for k,v in self.all_group.all_hosts.iteritems():
        for k,v in self.all_group.all_hosts.items():
            mem_host_name_map[k] = v
            instance_id = self._get_instance_id(v.variables)
            if instance_id in self.db_instance_id_map:
@@ -749,7 +742,7 @@ class Command(BaseCommand):

        # Update all existing hosts where we know the PK based on instance_id.
        all_host_pks = sorted(mem_host_pk_map.keys())
        for offset in xrange(0, len(all_host_pks), self._batch_size):
        for offset in range(0, len(all_host_pks), self._batch_size):
            host_pks = all_host_pks[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(pk__in=host_pks):
                if db_host.pk in host_pks_updated:
@@ -761,7 +754,7 @@ class Command(BaseCommand):

        # Update all existing hosts where we know the instance_id.
        all_instance_ids = sorted(mem_host_instance_id_map.keys())
        for offset in xrange(0, len(all_instance_ids), self._batch_size):
        for offset in range(0, len(all_instance_ids), self._batch_size):
            instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
                if db_host.pk in host_pks_updated:
@@ -773,7 +766,7 @@ class Command(BaseCommand):

        # Update all existing hosts by name.
        all_host_names = sorted(mem_host_name_map.keys())
        for offset in xrange(0, len(all_host_names), self._batch_size):
        for offset in range(0, len(all_host_names), self._batch_size):
            host_names = all_host_names[offset:(offset + self._batch_size)]
            for db_host in self.inventory.hosts.filter(name__in=host_names):
                if db_host.pk in host_pks_updated:
@@ -815,15 +808,15 @@ class Command(BaseCommand):
        '''
        if settings.SQL_DEBUG:
            queries_before = len(connection.queries)
        all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.children])
        all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.children])
        group_group_count = 0
        for offset in xrange(0, len(all_group_names), self._batch_size):
        for offset in range(0, len(all_group_names), self._batch_size):
            group_names = all_group_names[offset:(offset + self._batch_size)]
            for db_group in self.inventory.groups.filter(name__in=group_names):
                mem_group = self.all_group.all_groups[db_group.name]
                group_group_count += len(mem_group.children)
                all_child_names = sorted([g.name for g in mem_group.children])
                for offset2 in xrange(0, len(all_child_names), self._batch_size):
                for offset2 in range(0, len(all_child_names), self._batch_size):
                    child_names = all_child_names[offset2:(offset2 + self._batch_size)]
                    db_children_qs = self.inventory.groups.filter(name__in=child_names)
                    for db_child in db_children_qs.filter(children__id=db_group.id):
@@ -842,15 +835,15 @@ class Command(BaseCommand):
        # belongs.
        if settings.SQL_DEBUG:
            queries_before = len(connection.queries)
        all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.hosts])
        all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.hosts])
        group_host_count = 0
        for offset in xrange(0, len(all_group_names), self._batch_size):
        for offset in range(0, len(all_group_names), self._batch_size):
            group_names = all_group_names[offset:(offset + self._batch_size)]
            for db_group in self.inventory.groups.filter(name__in=group_names):
                mem_group = self.all_group.all_groups[db_group.name]
                group_host_count += len(mem_group.hosts)
                all_host_names = sorted([h.name for h in mem_group.hosts if not h.instance_id])
                for offset2 in xrange(0, len(all_host_names), self._batch_size):
                for offset2 in range(0, len(all_host_names), self._batch_size):
                    host_names = all_host_names[offset2:(offset2 + self._batch_size)]
                    db_hosts_qs = self.inventory.hosts.filter(name__in=host_names)
                    for db_host in db_hosts_qs.filter(groups__id=db_group.id):
@@ -859,7 +852,7 @@ class Command(BaseCommand):
                    self._batch_add_m2m(db_group.hosts, db_host)
                    logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
                all_instance_ids = sorted([h.instance_id for h in mem_group.hosts if h.instance_id])
                for offset2 in xrange(0, len(all_instance_ids), self._batch_size):
                for offset2 in range(0, len(all_instance_ids), self._batch_size):
                    instance_ids = all_instance_ids[offset2:(offset2 + self._batch_size)]
                    db_hosts_qs = self.inventory.hosts.filter(instance_id__in=instance_ids)
                    for db_host in db_hosts_qs.filter(groups__id=db_group.id):
@@ -892,12 +885,24 @@ class Command(BaseCommand):
        self._create_update_group_children()
        self._create_update_group_hosts()

    def remote_tower_license_compare(self, local_license_type):
        # this requires https://github.com/ansible/ansible/pull/52747
        source_vars = self.all_group.variables
        remote_license_type = source_vars.get('tower_metadata', {}).get('license_type', None)
        if remote_license_type is None:
            raise CommandError('Unexpected Error: Tower inventory plugin missing needed metadata!')
        if local_license_type != remote_license_type:
            raise CommandError('Tower server licenses must match: source: {} local: {}'.format(
                remote_license_type, local_license_type
            ))

def check_license(self):
|
||||
license_info = get_licenser().validate()
|
||||
local_license_type = license_info.get('license_type', 'UNLICENSED')
|
||||
if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED':
|
||||
logger.error(LICENSE_NON_EXISTANT_MESSAGE)
|
||||
raise CommandError('No license found!')
|
||||
elif license_info.get('license_type', 'UNLICENSED') == 'open':
|
||||
elif local_license_type == 'open':
|
||||
return
|
||||
available_instances = license_info.get('available_instances', 0)
|
||||
free_instances = license_info.get('free_instances', 0)
|
||||
@@ -906,6 +911,13 @@ class Command(BaseCommand):
|
||||
if time_remaining <= 0 and not license_info.get('demo', False):
|
||||
logger.error(LICENSE_EXPIRED_MESSAGE)
|
||||
raise CommandError("License has expired!")
|
||||
# special check for tower-type inventory sources
|
||||
# but only if running the plugin
|
||||
TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
|
||||
if self.inventory_source.source == 'tower' and any(f in self.source for f in TOWER_SOURCE_FILES):
|
||||
# only if this is the 2nd call to license check, we cannot compare before running plugin
|
||||
if hasattr(self, 'all_group'):
|
||||
self.remote_tower_license_compare(local_license_type)
|
||||
if free_instances < 0:
|
||||
d = {
|
||||
'new_count': new_count,
|
||||
@@ -917,15 +929,33 @@ class Command(BaseCommand):
|
||||
logger.error(LICENSE_MESSAGE % d)
|
||||
raise CommandError('License count exceeded!')
|
||||
|
||||
def check_org_host_limit(self):
|
||||
license_info = get_licenser().validate()
|
||||
if license_info.get('license_type', 'UNLICENSED') == 'open':
|
||||
return
|
||||
|
||||
org = self.inventory.organization
|
||||
if org is None or org.max_hosts == 0:
|
||||
return
|
||||
|
||||
active_count = Host.objects.org_active_count(org.id)
|
||||
if active_count > org.max_hosts:
|
||||
raise CommandError('Host limit for organization exceeded!')
|
||||
|
||||
def mark_license_failure(self, save=True):
|
||||
self.inventory_update.license_error = True
|
||||
self.inventory_update.save(update_fields=['license_error'])
|
||||
|
||||
def mark_org_limits_failure(self, save=True):
|
||||
self.inventory_update.org_host_limit_error = True
|
||||
self.inventory_update.save(update_fields=['org_host_limit_error'])
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.verbosity = int(options.get('verbosity', 1))
|
||||
self.set_logging_level()
|
||||
self.inventory_name = options.get('inventory_name', None)
|
||||
self.inventory_id = options.get('inventory_id', None)
|
||||
venv_path = options.get('venv', None)
|
||||
self.overwrite = bool(options.get('overwrite', False))
|
||||
self.overwrite_vars = bool(options.get('overwrite_vars', False))
|
||||
self.keep_vars = bool(options.get('keep_vars', False))
|
||||
@@ -973,6 +1003,13 @@ class Command(BaseCommand):
|
||||
self.mark_license_failure(save=True)
|
||||
raise e
|
||||
|
||||
try:
|
||||
# Check the per-org host limits
|
||||
self.check_org_host_limit()
|
||||
except CommandError as e:
|
||||
self.mark_org_limits_failure(save=True)
|
||||
raise e
|
||||
|
||||
status, tb, exc = 'error', '', None
|
||||
try:
|
||||
if settings.SQL_DEBUG:
|
||||
@@ -986,12 +1023,26 @@ class Command(BaseCommand):
|
||||
self.inventory_update.status = 'running'
|
||||
self.inventory_update.save()
|
||||
|
||||
# Load inventory from source.
|
||||
self.all_group = load_inventory_source(self.source,
|
||||
self.group_filter_re,
|
||||
self.host_filter_re,
|
||||
self.exclude_empty_groups,
|
||||
self.is_custom)
|
||||
source = self.get_source_absolute_path(self.source)
|
||||
|
||||
data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom, venv_path=venv_path).load()
|
||||
|
||||
logger.debug('Finished loading from source: %s', source)
|
||||
logger.info('Processing JSON output...')
|
||||
inventory = MemInventory(
|
||||
group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
|
||||
inventory = dict_to_mem_data(data, inventory=inventory)
|
||||
|
||||
del data # forget dict from import, could be large
|
||||
|
||||
logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
|
||||
len(inventory.all_group.all_hosts))
|
||||
|
||||
if self.exclude_empty_groups:
|
||||
inventory.delete_empty_groups()
|
||||
|
||||
self.all_group = inventory.all_group
|
||||
|
||||
if settings.DEBUG:
|
||||
# depending on inventory source, this output can be
|
||||
# *exceedingly* verbose - crawling a deeply nested
|
||||
@@ -1030,9 +1081,17 @@ class Command(BaseCommand):
|
||||
# If the license is not valid, a CommandError will be thrown,
|
||||
# and inventory update will be marked as invalid.
|
||||
# with transaction.atomic() will roll back the changes.
|
||||
license_fail = True
|
||||
self.check_license()
|
||||
|
||||
# Check the per-org host limits
|
||||
license_fail = False
|
||||
self.check_org_host_limit()
|
||||
except CommandError as e:
|
||||
self.mark_license_failure()
|
||||
if license_fail:
|
||||
self.mark_license_failure()
|
||||
else:
|
||||
self.mark_org_limits_failure()
|
||||
raise e
|
||||
|
||||
if settings.SQL_DEBUG:
|
||||
@@ -1060,9 +1119,8 @@ class Command(BaseCommand):
|
||||
else:
|
||||
tb = traceback.format_exc()
|
||||
exc = e
|
||||
transaction.rollback()
|
||||
|
||||
if self.invoked_from_dispatcher is False:
|
||||
if not self.invoked_from_dispatcher:
|
||||
with ignore_inventory_computed_fields():
|
||||
self.inventory_update = InventoryUpdate.objects.get(pk=self.inventory_update.pk)
|
||||
self.inventory_update.result_traceback = tb
|
||||
@@ -1071,7 +1129,10 @@ class Command(BaseCommand):
|
||||
self.inventory_source.status = status
|
||||
self.inventory_source.save(update_fields=['status'])
|
||||
|
||||
if exc and isinstance(exc, CommandError):
|
||||
sys.exit(1)
|
||||
elif exc:
|
||||
raise
|
||||
if exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
if exc:
|
||||
if isinstance(exc, CommandError):
|
||||
sys.exit(1)
|
||||
raise exc
|
||||
|
||||
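Worth noting for readers of the hunk above: a single license_fail flag decides which failure marker is written when either check raises. A minimal sketch of that control flow, with a hypothetical `cmd` standing in for the command instance:

# Minimal sketch of the two-phase check above; `cmd` is a hypothetical
# stand-in exposing the same methods as the management command.
from django.core.management.base import CommandError

def run_checks(cmd):
    try:
        license_fail = True       # a CommandError from here is a license problem
        cmd.check_license()

        license_fail = False      # ...and from here, an org host-limit problem
        cmd.check_org_host_limit()
    except CommandError:
        if license_fail:
            cmd.mark_license_failure()
        else:
            cmd.mark_org_limits_failure()
        raise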
@@ -3,7 +3,6 @@

from awx.main.models import Instance, InstanceGroup
from django.core.management.base import BaseCommand
- import six


class Ungrouped(object):
@@ -42,7 +41,7 @@ class Command(BaseCommand):
fmt += ' policy>={0.policy_instance_minimum}'
if instance_group.controller:
fmt += ' controller={0.controller.name}'
- print(six.text_type(fmt + ']').format(instance_group))
+ print((fmt + ']').format(instance_group))
for x in instance_group.instances.all():
color = '\033[92m'
if x.capacity == 0 or x.enabled is False:
@@ -52,5 +51,5 @@ class Command(BaseCommand):
fmt += ' last_isolated_check="{0.last_isolated_check:%Y-%m-%d %H:%M:%S}"'
if x.capacity:
fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
- print(six.text_type(fmt + '\033[0m').format(x, x.version or '?'))
+ print((fmt + '\033[0m').format(x, x.version or '?'))
print('')
awx/main/management/commands/profile_sql.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+ from django.core.management.base import BaseCommand
+
+ from awx.main.tasks import profile_sql
+
+
+ class Command(BaseCommand):
+ """
+ Enable or disable SQL Profiling across all Python processes.
+ SQL profile data will be recorded at /var/log/tower/profile
+ """
+
+ def add_arguments(self, parser):
+ parser.add_argument('--threshold', dest='threshold', type=float, default=2.0,
+ help='The minimum query duration in seconds (default=2). Use 0 to disable.')
+ parser.add_argument('--minutes', dest='minutes', type=float, default=5,
+ help='How long to record for in minutes (default=5)')
+
+ def handle(self, **options):
+ profile_sql.delay(
+ threshold=options['threshold'], minutes=options['minutes']
+ )
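The command itself only enqueues the profile_sql task; a usage sketch (argument names taken from add_arguments above; the call assumes a configured AWX/Django environment):

# Equivalent to: awx-manage profile_sql --threshold 1.5 --minutes 10
from django.core.management import call_command

# Record queries slower than 1.5 seconds for the next 10 minutes.
call_command('profile_sql', threshold=1.5, minutes=10)

# Per the --threshold help text, 0 turns profiling off again.
call_command('profile_sql', threshold=0)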
@@ -1,17 +0,0 @@
- # Copyright (c) 2017 Ansible by Red Hat
- # All Rights Reserved
-
- # Borrow from another AWX command
- from awx.main.management.commands.provision_instance import Command as OtherCommand
-
- # Python
- import warnings
-
-
- class Command(OtherCommand):
-
- def handle(self, *args, **options):
- # TODO: delete this entire file in 3.3
- warnings.warn('This command is replaced with `provision_instance` and will '
- 'be removed in release 3.3.')
- return super(Command, self).handle(*args, **options)
@@ -1,7 +1,6 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
import sys
- import six

from awx.main.utils.pglock import advisory_lock
from awx.main.models import Instance, InstanceGroup
@@ -19,11 +18,11 @@ class InstanceNotFound(Exception):
class Command(BaseCommand):

def add_arguments(self, parser):
- parser.add_argument('--queuename', dest='queuename', type=lambda s: six.text_type(s, 'utf8'),
+ parser.add_argument('--queuename', dest='queuename', type=str,
help='Queue to create/update')
- parser.add_argument('--hostnames', dest='hostnames', type=lambda s: six.text_type(s, 'utf8'),
+ parser.add_argument('--hostnames', dest='hostnames', type=str,
help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)')
- parser.add_argument('--controller', dest='controller', type=lambda s: six.text_type(s, 'utf8'),
+ parser.add_argument('--controller', dest='controller', type=str,
default='', help='The controlling group (makes this an isolated group)')
parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
help='The percentage of active instances that will be assigned to this group'),
@@ -73,7 +72,7 @@ class Command(BaseCommand):
if instance.exists():
instances.append(instance[0])
else:
- raise InstanceNotFound(six.text_type("Instance does not exist: {}").format(inst_name), changed)
+ raise InstanceNotFound("Instance does not exist: {}".format(inst_name), changed)

ig.instances.add(*instances)

@@ -99,24 +98,24 @@ class Command(BaseCommand):
if options.get('hostnames'):
hostname_list = options.get('hostnames').split(",")

- with advisory_lock(six.text_type('instance_group_registration_{}').format(queuename)):
+ with advisory_lock('instance_group_registration_{}'.format(queuename)):
changed2 = False
changed3 = False
(ig, created, changed1) = self.get_create_update_instance_group(queuename, inst_per, inst_min)
if created:
- print(six.text_type("Creating instance group {}".format(ig.name)))
+ print("Creating instance group {}".format(ig.name))
elif not created:
- print(six.text_type("Instance Group already registered {}").format(ig.name))
+ print("Instance Group already registered {}".format(ig.name))

if ctrl:
(ig_ctrl, changed2) = self.update_instance_group_controller(ig, ctrl)
if changed2:
- print(six.text_type("Set controller group {} on {}.").format(ctrl, queuename))
+ print("Set controller group {} on {}.".format(ctrl, queuename))

try:
(instances, changed3) = self.add_instances_to_group(ig, hostname_list)
for i in instances:
- print(six.text_type("Added instance {} to {}").format(i.hostname, ig.name))
+ print("Added instance {} to {}".format(i.hostname, ig.name))
except InstanceNotFound as e:
instance_not_found_err = e

@@ -126,4 +125,3 @@ class Command(BaseCommand):
if instance_not_found_err:
print(instance_not_found_err.message)
sys.exit(1)
-
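For reference, the same register_queue options invoked programmatically; a sketch assuming a configured AWX environment, with made-up queue, host, and controller names:

# Equivalent to:
#   awx-manage register_queue --queuename=thequeue --hostnames=host1,host2
from django.core.management import call_command

call_command('register_queue', queuename='thequeue', hostnames='host1,host2')

# Per the --controller help text, naming a controlling group makes the
# new group an isolated group:
call_command('register_queue', queuename='isolated', hostnames='host3', controller='tower')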
@@ -4,6 +4,7 @@
import sys
import time
import json
+ import random

from django.utils import timezone
from django.core.management.base import BaseCommand
@@ -26,7 +27,21 @@ from awx.api.serializers import (
)


- class ReplayJobEvents():
+ class JobStatusLifeCycle():
+ def emit_job_status(self, job, status):
+ # {"status": "successful", "project_id": 13, "unified_job_id": 659, "group_name": "jobs"}
+ job.websocket_emit_status(status)
+
+ def determine_job_event_finish_status_index(self, job_event_count, random_seed):
+ if random_seed == 0:
+ return job_event_count - 1
+
+ random.seed(random_seed)
+ job_event_index = random.randint(0, job_event_count - 1)
+ return job_event_index
+
+
+ class ReplayJobEvents(JobStatusLifeCycle):

recording_start = None
replay_start = None
@@ -76,9 +91,10 @@ class ReplayJobEvents():
job_events = job.inventory_update_events.order_by('created')
elif type(job) is SystemJob:
job_events = job.system_job_events.order_by('created')
- if job_events.count() == 0:
+ count = job_events.count()
+ if count == 0:
raise RuntimeError("No events for job id {}".format(job.id))
- return job_events
+ return job_events, count

def get_serializer(self, job):
if type(job) is Job:
@@ -95,7 +111,7 @@ class ReplayJobEvents():
raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))
sys.exit(1)

- def run(self, job_id, speed=1.0, verbosity=0, skip_range=[]):
+ def run(self, job_id, speed=1.0, verbosity=0, skip_range=[], random_seed=0, final_status_delay=0, debug=False):
stats = {
'events_ontime': {
'total': 0,
@@ -119,17 +135,27 @@ class ReplayJobEvents():
}
try:
job = self.get_job(job_id)
- job_events = self.get_job_events(job)
+ job_events, job_event_count = self.get_job_events(job)
serializer = self.get_serializer(job)
except RuntimeError as e:
print("{}".format(e.message))
sys.exit(1)

je_previous = None

+ self.emit_job_status(job, 'pending')
+ self.emit_job_status(job, 'waiting')
+ self.emit_job_status(job, 'running')
+
+ finish_status_index = self.determine_job_event_finish_status_index(job_event_count, random_seed)
+
for n, je_current in enumerate(job_events):
if je_current.counter in skip_range:
continue

+ if debug:
+ input("{} of {}:".format(n, job_event_count))
+
if not je_previous:
stats['recording_start'] = je_current.created
self.start(je_current.created)
@@ -146,7 +172,7 @@ class ReplayJobEvents():
print("recording: next job in {} seconds".format(recording_diff))
if replay_offset >= 0:
replay_diff = recording_diff - replay_offset

if replay_diff > 0:
stats['events_ontime']['total'] += 1
if verbosity >= 3:
@@ -167,6 +193,11 @@ class ReplayJobEvents():
stats['events_total'] += 1
je_previous = je_current

+ if n == finish_status_index:
+ if final_status_delay != 0:
+ self.sleep(final_status_delay)
+ self.emit_job_status(job, job.status)
+
if stats['events_total'] > 2:
stats['replay_end'] = self.now()
stats['replay_duration'] = (stats['replay_end'] - stats['replay_start']).total_seconds()
@@ -206,16 +237,26 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--job_id', dest='job_id', type=int, metavar='j',
help='Id of the job to replay (job or adhoc)')
- parser.add_argument('--speed', dest='speed', type=int, metavar='s',
+ parser.add_argument('--speed', dest='speed', type=float, metavar='s',
help='Speedup factor.')
parser.add_argument('--skip-range', dest='skip_range', type=str, metavar='k',
default='0:-1:1', help='Range of events to skip')
+ parser.add_argument('--random-seed', dest='random_seed', type=int, metavar='r',
+ default=0, help='Random number generator seed to use when determining job_event index to emit final job status')
+ parser.add_argument('--final-status-delay', dest='final_status_delay', type=float, metavar='f',
+ default=0, help='Delay between event and final status emit')
+ parser.add_argument('--debug', dest='debug', type=bool, metavar='d',
+ default=False, help='Enable step mode to control emission of job events one at a time.')

def handle(self, *args, **options):
job_id = options.get('job_id')
speed = options.get('speed') or 1
verbosity = options.get('verbosity') or 0
+ random_seed = options.get('random_seed')
+ final_status_delay = options.get('final_status_delay')
+ debug = options.get('debug')
skip = self._parse_slice_range(options.get('skip_range'))

replayer = ReplayJobEvents()
- replayer.run(job_id, speed, verbosity, skip)
+ replayer.run(job_id, speed=speed, verbosity=verbosity, skip_range=skip, random_seed=random_seed,
+ final_status_delay=final_status_delay, debug=debug)
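The seeded-index logic above is what makes --random-seed reproducible: random.seed() fixes the RNG state, so the same seed always picks the same event to carry the final status. A standalone sketch of the same arithmetic:

import random

def determine_finish_index(job_event_count, random_seed):
    # Mirrors determine_job_event_finish_status_index above.
    if random_seed == 0:
        return job_event_count - 1        # default: final status on the last event
    random.seed(random_seed)
    return random.randint(0, job_event_count - 1)

# Same seed, same index, run after run:
assert determine_finish_index(100, 3) == determine_finish_index(100, 3)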
awx/main/management/commands/revoke_oauth2_tokens.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+ # Django
+ from django.core.management.base import BaseCommand, CommandError
+ from django.contrib.auth.models import User
+ from django.core.exceptions import ObjectDoesNotExist
+
+ # AWX
+ from awx.main.models.oauth import OAuth2AccessToken
+ from oauth2_provider.models import RefreshToken
+
+
+ def revoke_tokens(token_list):
+ for token in token_list:
+ token.revoke()
+ print('revoked {} {}'.format(token.__class__.__name__, token.token))
+
+
+ class Command(BaseCommand):
+ """Command that revokes OAuth2 access tokens."""
+ help='Revokes OAuth2 access tokens. Use --all to revoke access and refresh tokens.'
+
+ def add_arguments(self, parser):
+ parser.add_argument('--user', dest='user', type=str, help='revoke OAuth2 tokens for a specific username')
+ parser.add_argument('--all', dest='all', action='store_true', help='revoke OAuth2 access tokens and refresh tokens')
+
+ def handle(self, *args, **options):
+ if not options['user']:
+ if options['all']:
+ revoke_tokens(RefreshToken.objects.filter(revoked=None))
+ revoke_tokens(OAuth2AccessToken.objects.all())
+ else:
+ try:
+ user = User.objects.get(username=options['user'])
+ except ObjectDoesNotExist:
+ raise CommandError('A user with that username does not exist.')
+ if options['all']:
+ revoke_tokens(RefreshToken.objects.filter(revoked=None).filter(user=user))
+ revoke_tokens(user.main_oauth2accesstoken.filter(user=user))
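A usage sketch for the new command (assumes a configured AWX environment; 'alice' is a made-up username):

# awx-manage revoke_oauth2_tokens                 -> revoke every access token
# awx-manage revoke_oauth2_tokens --all           -> also revoke refresh tokens
# awx-manage revoke_oauth2_tokens --user=alice    -> only tokens owned by alice
from django.core.management import call_command

call_command('revoke_oauth2_tokens', user='alice', all=True)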
@@ -19,7 +19,7 @@ logger = logging.getLogger('awx.main.dispatch')


def construct_bcast_queue_name(common_name):
- return common_name.encode('utf8') + '_' + settings.CLUSTER_HOST_ID
+ return common_name + '_' + settings.CLUSTER_HOST_ID


class Command(BaseCommand):
@@ -69,21 +69,42 @@ class Command(BaseCommand):

return TaskResult()

+ sched_file = '/var/lib/awx/beat.db'
app = Celery()
app.conf.BROKER_URL = settings.BROKER_URL
app.conf.CELERY_TASK_RESULT_EXPIRES = False
+
+ # celery in py3 seems to have a bug where the celerybeat schedule
+ # shelve can become corrupted; we've _only_ seen this in Ubuntu and py36
+ # it can be avoided by detecting and removing the corrupted file
+ # at some point, we'll just stop using celerybeat, because it's clearly
+ # buggy, too -_-
+ #
+ # https://github.com/celery/celery/issues/4777
+ sched = AWXScheduler(schedule_filename=sched_file, app=app)
+ try:
+ sched.setup_schedule()
+ except Exception:
+ logger.exception('{} is corrupted, removing.'.format(sched_file))
+ sched._remove_db()
+ finally:
+ try:
+ sched.close()
+ except Exception:
+ logger.exception('{} failed to sync/close'.format(sched_file))
+
beat.Beat(
30,
app,
- schedule='/var/lib/awx/beat.db', scheduler_cls=AWXScheduler
+ schedule=sched_file, scheduler_cls=AWXScheduler
).run()

def handle(self, *arg, **options):
if options.get('status'):
- print Control('dispatcher').status()
+ print(Control('dispatcher').status())
return
if options.get('running'):
- print Control('dispatcher').running()
+ print(Control('dispatcher').running())
return
if options.get('reload'):
return Control('dispatcher').control({'control': 'reload'})
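The handle() above also doubles as a small control interface for a running dispatcher; a sketch of the equivalent calls (assuming a configured AWX environment):

# awx-manage run_dispatcher --status   -> print(Control('dispatcher').status())
# awx-manage run_dispatcher --running  -> print(Control('dispatcher').running())
# awx-manage run_dispatcher --reload   -> Control('dispatcher').control({'control': 'reload'})
from django.core.management import call_command

call_command('run_dispatcher', status=True)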
@@ -28,7 +28,7 @@ class Command(BaseCommand):
args = [
'ansible', 'all', '-i', '{},'.format(hostname), '-u',
settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
- '-a', 'awx-expect -h', '-vvv'
+ '-a', 'ansible-runner --version', '-vvv'
]
if all([
getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
@@ -1,39 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""A command that reports whether a username exists within the
|
||||
system or not.
|
||||
"""
|
||||
def handle(self, *args, **options):
|
||||
"""Print out information about the user to the console."""
|
||||
|
||||
# Sanity check: There should be one and exactly one positional
|
||||
# argument.
|
||||
if len(args) != 1:
|
||||
raise CommandError('This command requires one positional argument '
|
||||
'(a username).')
|
||||
|
||||
# Get the user.
|
||||
try:
|
||||
username = args[0]
|
||||
user = User.objects.get(username=username)
|
||||
|
||||
# Print a cute header.
|
||||
header = 'Information for user: %s' % username
|
||||
print('%s\n%s' % (header, '=' * len(header)))
|
||||
|
||||
# Print the email and real name of the user.
|
||||
print('Email: %s' % user.email)
|
||||
if user.first_name or user.last_name:
|
||||
print('Name: %s %s' % (user.first_name, user.last_name))
|
||||
else:
|
||||
print('No name provided.')
|
||||
except User.DoesNotExist:
|
||||
raise CommandError('User %s does not exist.' % username)
|
||||
|
||||
@@ -29,6 +29,34 @@ class HostManager(models.Manager):
"""
return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()

+ def org_active_count(self, org_id):
+ """Return count of active, unique hosts used by an organization.
+ Construction of query involves:
+ - remove any ordering specified in model's Meta
+ - Exclude hosts sourced from another Tower
+ - Consider only hosts where the canonical inventory is owned by the organization
+ - Restrict the query to only return the name column
+ - Only consider results that are unique
+ - Return the count of this query
+ """
+ return self.order_by().exclude(
+ inventory_sources__source='tower'
+ ).filter(inventory__organization=org_id).values('name').distinct().count()
+
+ def active_counts_by_org(self):
+ """Return the counts of active, unique hosts for each organization.
+ Construction of query involves:
+ - remove any ordering specified in model's Meta
+ - Exclude hosts sourced from another Tower
+ - Consider only hosts where the canonical inventory is owned by each organization
+ - Restrict the query to only count distinct names
+ - Return the counts
+ """
+ return self.order_by().exclude(
+ inventory_sources__source='tower'
+ ).values('inventory__organization').annotate(
+ inventory__organization__count=models.Count('name', distinct=True))
+
def get_queryset(self):
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
set. Use the `host_filter` to generate the queryset for the hosts.
@@ -38,20 +66,20 @@ class HostManager(models.Manager):
hasattr(self.instance, 'host_filter') and
hasattr(self.instance, 'kind')):
if self.instance.kind == 'smart' and self.instance.host_filter is not None:
- q = SmartFilter.query_from_string(self.instance.host_filter)
- if self.instance.organization_id:
- q = q.filter(inventory__organization=self.instance.organization_id)
- # If we are using host_filters, disable the core_filters, this allows
- # us to access all of the available Host entries, not just the ones associated
- # with a specific FK/relation.
- #
- # If we don't disable this, a filter of {'inventory': self.instance} gets automatically
- # injected by the related object mapper.
- self.core_filters = {}
+ q = SmartFilter.query_from_string(self.instance.host_filter)
+ if self.instance.organization_id:
+ q = q.filter(inventory__organization=self.instance.organization_id)
+ # If we are using host_filters, disable the core_filters, this allows
+ # us to access all of the available Host entries, not just the ones associated
+ # with a specific FK/relation.
+ #
+ # If we don't disable this, a filter of {'inventory': self.instance} gets automatically
+ # injected by the related object mapper.
+ self.core_filters = {}

- qs = qs & q
- unique_by_name = qs.order_by('name', 'pk').distinct('name')
- return qs.filter(pk__in=unique_by_name)
+ qs = qs & q
+ unique_by_name = qs.order_by('name', 'pk').distinct('name')
+ return qs.filter(pk__in=unique_by_name)
return qs

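A sketch of how the two new manager helpers might be used together, following their docstrings (assumes a configured AWX environment):

from awx.main.models import Host, Organization

org = Organization.objects.first()
if org is not None and org.max_hosts:
    # Distinct, non-Tower-sourced host names owned by this organization.
    if Host.objects.org_active_count(org.id) > org.max_hosts:
        print('host limit for organization exceeded')

# The same count for every organization in a single query:
for row in Host.objects.active_counts_by_org():
    print(row['inventory__organization'], row['inventory__organization__count'])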
@@ -4,11 +4,11 @@
import uuid
import logging
import threading
- import six
import time
import cProfile
import pstats
import os
+ import urllib.parse

from django.conf import settings
from django.contrib.auth.models import User
@@ -34,7 +34,7 @@ perf_logger = logging.getLogger('awx.analytics.performance')

class TimingMiddleware(threading.local):

- dest = '/var/lib/awx/profile'
+ dest = '/var/log/tower/profile'

def process_request(self, request):
self.start_time = time.time()
@@ -57,7 +57,7 @@ class TimingMiddleware(threading.local):
def save_profile_file(self, request):
if not os.path.isdir(self.dest):
os.makedirs(self.dest)
- filename = '%.3fs-%s' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
+ filename = '%.3fs-%s.pstats' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
filepath = os.path.join(self.dest, filename)
with open(filepath, 'w') as f:
f.write('%s %s\n' % (request.method, request.get_full_path()))
@@ -126,8 +126,9 @@ class SessionTimeoutMiddleware(object):
"""

def process_response(self, request, response):
+ should_skip = 'HTTP_X_WS_SESSION_QUIET' in request.META
req_session = getattr(request, 'session', None)
- if req_session and not req_session.is_empty():
+ if req_session and not req_session.is_empty() and should_skip is False:
expiry = int(settings.SESSION_COOKIE_AGE)
request.session.set_expiry(expiry)
response['Session-Timeout'] = expiry
@@ -194,7 +195,7 @@ class URLModificationMiddleware(object):

def process_request(self, request):
if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
- old_path = six.moves.urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
+ old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
old_path = old_path[request.path.find(request.path_info):]
else:
old_path = request.path_info
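Since profile filenames now embed the request duration and end in .pstats ('%.3fs-<uuid>.pstats'), the slowest requests can be picked straight out of a directory listing; a sketch assuming every file in dest follows that naming:

import os

dest = '/var/log/tower/profile'
# Sort by the duration prefix, e.g. '0.123s-<uuid>.pstats' -> 0.123
by_duration = sorted(os.listdir(dest),
                     key=lambda name: float(name.split('s-')[0]),
                     reverse=True)
for name in by_duration[:10]:   # ten slowest profiled requests
    print(name)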
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
name='ActivityStream',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
+ ('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])),
('timestamp', models.DateTimeField(auto_now_add=True)),
('changes', models.TextField(blank=True)),
('object_relationship_type', models.TextField(blank=True)),
@@ -42,8 +42,8 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
- ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
+ ('host_name', models.CharField(default='', max_length=1024, editable=False)),
+ ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])),
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
@@ -60,8 +60,8 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('expires', models.DateTimeField(default=django.utils.timezone.now)),
- ('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
- ('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
+ ('request_hash', models.CharField(default='', max_length=40, blank=True)),
+ ('reason', models.CharField(default='', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
],
),
@@ -71,22 +71,22 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])),
+ ('kind', models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')])),
('cloud', models.BooleanField(default=False, editable=False)),
- ('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
- ('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
- ('password', models.CharField(default=b'', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
- ('security_token', models.CharField(default=b'', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
- ('project', models.CharField(default=b'', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
- ('ssh_key_data', models.TextField(default=b'', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
- ('ssh_key_unlock', models.CharField(default=b'', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
- ('become_method', models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec')])),
- ('become_username', models.CharField(default=b'', help_text='Privilege escalation username.', max_length=1024, blank=True)),
- ('become_password', models.CharField(default=b'', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
- ('vault_password', models.CharField(default=b'', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
+ ('host', models.CharField(default='', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
+ ('username', models.CharField(default='', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
+ ('password', models.CharField(default='', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
+ ('security_token', models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
+ ('project', models.CharField(default='', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
+ ('ssh_key_data', models.TextField(default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
+ ('ssh_key_unlock', models.CharField(default='', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
+ ('become_method', models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')])),
+ ('become_username', models.CharField(default='', help_text='Privilege escalation username.', max_length=1024, blank=True)),
+ ('become_password', models.CharField(default='', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
+ ('vault_password', models.CharField(default='', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
@@ -101,10 +101,10 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)),
+ ('script', models.TextField(default='', help_text='Inventory script contents', blank=True)),
('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
],
@@ -118,10 +118,10 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)),
+ ('variables', models.TextField(default='', help_text='Group variables in JSON or YAML format.', blank=True)),
('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
@@ -140,12 +140,12 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
- ('instance_id', models.CharField(default=b'', max_length=100, blank=True)),
- ('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)),
+ ('instance_id', models.CharField(default='', max_length=100, blank=True)),
+ ('variables', models.TextField(default='', help_text='Host variables in JSON or YAML format.', blank=True)),
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -171,10 +171,10 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
- ('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)),
+ ('variables', models.TextField(default='', help_text='Inventory variables in JSON or YAML format.', blank=True)),
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
@@ -197,14 +197,14 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])),
+ ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete')])),
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
- ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
- ('play', models.CharField(default=b'', max_length=1024, editable=False)),
- ('role', models.CharField(default=b'', max_length=1024, editable=False)),
- ('task', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('host_name', models.CharField(default='', max_length=1024, editable=False)),
+ ('play', models.CharField(default='', max_length=1024, editable=False)),
+ ('role', models.CharField(default='', max_length=1024, editable=False)),
+ ('task', models.CharField(default='', max_length=1024, editable=False)),
('counter', models.PositiveIntegerField(default=0)),
('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
@@ -220,7 +220,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('host_name', models.CharField(default='', max_length=1024, editable=False)),
('changed', models.PositiveIntegerField(default=0, editable=False)),
('dark', models.PositiveIntegerField(default=0, editable=False)),
('failures', models.PositiveIntegerField(default=0, editable=False)),
@@ -250,7 +250,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
@@ -269,10 +269,10 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])),
+ ('permission_type', models.CharField(max_length=64, choices=[('read', 'Read Inventory'), ('write', 'Edit Inventory'), ('admin', 'Administrate Inventory'), ('run', 'Deploy To Inventory'), ('check', 'Deploy To Inventory (Dry Run)'), ('scan', 'Scan an Inventory'), ('create', 'Create a Job Template')])),
('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
@@ -286,7 +286,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('ldap_dn', models.CharField(default=b'', max_length=1024)),
+ ('ldap_dn', models.CharField(default='', max_length=1024)),
('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, to=settings.AUTH_USER_MODEL)),
],
),
@@ -296,7 +296,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
('enabled', models.BooleanField(default=True)),
@@ -319,7 +319,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -338,26 +338,26 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
- ('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])),
+ ('launch_type', models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency')])),
('cancel_flag', models.BooleanField(default=False, editable=False)),
- ('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])),
+ ('status', models.CharField(default='new', max_length=20, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')])),
('failed', models.BooleanField(default=False, editable=False)),
('started', models.DateTimeField(default=None, null=True, editable=False)),
('finished', models.DateTimeField(default=None, null=True, editable=False)),
('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)),
- ('job_args', models.TextField(default=b'', editable=False, blank=True)),
- ('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
+ ('job_args', models.TextField(default='', editable=False, blank=True)),
+ ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)),
('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)),
- ('job_explanation', models.TextField(default=b'', editable=False, blank=True)),
- ('start_args', models.TextField(default=b'', editable=False, blank=True)),
- ('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)),
- ('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)),
- ('result_traceback', models.TextField(default=b'', editable=False, blank=True)),
- ('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)),
+ ('job_explanation', models.TextField(default='', editable=False, blank=True)),
+ ('start_args', models.TextField(default='', editable=False, blank=True)),
+ ('result_stdout_text', models.TextField(default='', editable=False, blank=True)),
+ ('result_stdout_file', models.TextField(default='', editable=False, blank=True)),
+ ('result_traceback', models.TextField(default='', editable=False, blank=True)),
+ ('celery_task_id', models.CharField(default='', max_length=100, editable=False, blank=True)),
],
),
migrations.CreateModel(
@@ -366,7 +366,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('description', models.TextField(default=b'', blank=True)),
+ ('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
@@ -374,19 +374,19 @@ class Migration(migrations.Migration):
('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
('has_schedules', models.BooleanField(default=False, editable=False)),
('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
- ('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])),
+ ('status', models.CharField(default='ok', max_length=32, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled'), ('never updated', 'Never Updated'), ('ok', 'OK'), ('missing', 'Missing'), ('none', 'No External Source'), ('updating', 'Updating')])),
],
),
migrations.CreateModel(
name='AdHocCommand',
fields=[
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
- ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')])),
- ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
- ('module_name', models.CharField(default=b'', max_length=1024, blank=True)),
- ('module_args', models.TextField(default=b'', blank=True)),
+ ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check')])),
+ ('limit', models.CharField(default='', max_length=1024, blank=True)),
+ ('module_name', models.CharField(default='', max_length=1024, blank=True)),
+ ('module_args', models.TextField(default='', blank=True)),
('forks', models.PositiveIntegerField(default=0, blank=True)),
- ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
bases=('main.unifiedjob',),
|
||||
@@ -395,12 +395,12 @@ class Migration(migrations.Migration):
|
||||
name='InventorySource',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('update_on_launch', models.BooleanField(default=False)),
|
||||
@@ -412,12 +412,12 @@ class Migration(migrations.Migration):
|
||||
name='InventoryUpdate',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('license_error', models.BooleanField(default=False, editable=False)),
|
||||
@@ -428,16 +428,16 @@ class Migration(migrations.Migration):
|
||||
name='Job',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
|
||||
('playbook', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('limit', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
options={
|
||||
@@ -449,18 +449,18 @@ class Migration(migrations.Migration):
|
||||
name='JobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
|
||||
('playbook', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('limit', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('host_config_key', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('ask_variables_on_launch', models.BooleanField(default=False)),
|
||||
('survey_enabled', models.BooleanField(default=False)),
|
||||
('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
@@ -475,9 +475,9 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
|
||||
@@ -494,9 +494,9 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
],
|
||||
@@ -506,8 +506,8 @@ class Migration(migrations.Migration):
|
||||
name='SystemJob',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('id',),
|
||||
@@ -518,7 +518,7 @@ class Migration(migrations.Migration):
|
||||
name='SystemJobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
],
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
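
Note (not part of the diff): the hunks above drop the Python 2 b'' bytestring literals from field defaults and choice keys in favor of plain str. A minimal sketch, assuming Python 3 semantics, of why the two spellings are no longer interchangeable:

# Sketch: on Python 3, bytes and str are distinct types that never compare
# equal, so a CharField default of b'manual' would not match its own
# str-valued choices.
assert b"" != ""
choices = {"manual", "relaunch", "callback", "scheduled", "dependency"}
assert "manual" in choices         # str default: matches a str choice key
assert b"manual" not in choices    # bytes default: silently misses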
@@ -105,24 +105,24 @@ def create_system_job_templates(apps, schema_editor):


 class Migration(migrations.Migration):
-    replaces = [(b'main', '0002_v300_tower_settings_changes'),
-                (b'main', '0003_v300_notification_changes'),
-                (b'main', '0004_v300_fact_changes'),
-                (b'main', '0005_v300_migrate_facts'),
-                (b'main', '0006_v300_active_flag_cleanup'),
-                (b'main', '0007_v300_active_flag_removal'),
-                (b'main', '0008_v300_rbac_changes'),
-                (b'main', '0009_v300_rbac_migrations'),
-                (b'main', '0010_v300_create_system_job_templates'),
-                (b'main', '0011_v300_credential_domain_field'),
-                (b'main', '0012_v300_create_labels'),
-                (b'main', '0013_v300_label_changes'),
-                (b'main', '0014_v300_invsource_cred'),
-                (b'main', '0015_v300_label_changes'),
-                (b'main', '0016_v300_prompting_changes'),
-                (b'main', '0017_v300_prompting_migrations'),
-                (b'main', '0018_v300_host_ordering'),
-                (b'main', '0019_v300_new_azure_credential'),]
+    replaces = [('main', '0002_v300_tower_settings_changes'),
+                ('main', '0003_v300_notification_changes'),
+                ('main', '0004_v300_fact_changes'),
+                ('main', '0005_v300_migrate_facts'),
+                ('main', '0006_v300_active_flag_cleanup'),
+                ('main', '0007_v300_active_flag_removal'),
+                ('main', '0008_v300_rbac_changes'),
+                ('main', '0009_v300_rbac_migrations'),
+                ('main', '0010_v300_create_system_job_templates'),
+                ('main', '0011_v300_credential_domain_field'),
+                ('main', '0012_v300_create_labels'),
+                ('main', '0013_v300_label_changes'),
+                ('main', '0014_v300_invsource_cred'),
+                ('main', '0015_v300_label_changes'),
+                ('main', '0016_v300_prompting_changes'),
+                ('main', '0017_v300_prompting_migrations'),
+                ('main', '0018_v300_host_ordering'),
+                ('main', '0019_v300_new_azure_credential'),]

 dependencies = [
 ('taggit', '0002_auto_20150616_2121'),
@@ -143,7 +143,7 @@ class Migration(migrations.Migration):
 ('description', models.TextField()),
 ('category', models.CharField(max_length=128)),
 ('value', models.TextField(blank=True)),
-('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])),
+('value_type', models.CharField(max_length=12, choices=[('string', 'String'), ('int', 'Integer'), ('float', 'Decimal'), ('json', 'JSON'), ('bool', 'Boolean'), ('password', 'Password'), ('list', 'List')])),
 ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
 ],
 ),
@@ -154,12 +154,12 @@ class Migration(migrations.Migration):
 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
 ('created', models.DateTimeField(default=None, editable=False)),
 ('modified', models.DateTimeField(default=None, editable=False)),
-('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
-('error', models.TextField(default=b'', editable=False, blank=True)),
+('status', models.CharField(default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')])),
+('error', models.TextField(default='', editable=False, blank=True)),
 ('notifications_sent', models.IntegerField(default=0, editable=False)),
-('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])),
-('recipients', models.TextField(default=b'', editable=False, blank=True)),
-('subject', models.TextField(default=b'', editable=False, blank=True)),
+('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
+('recipients', models.TextField(default='', editable=False, blank=True)),
+('subject', models.TextField(default='', editable=False, blank=True)),
 ('body', jsonfield.fields.JSONField(default=dict, blank=True)),
 ],
 options={
@@ -172,9 +172,9 @@ class Migration(migrations.Migration):
 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
 ('created', models.DateTimeField(default=None, editable=False)),
 ('modified', models.DateTimeField(default=None, editable=False)),
-('description', models.TextField(default=b'', blank=True)),
+('description', models.TextField(default='', blank=True)),
 ('name', models.CharField(unique=True, max_length=512)),
-('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])),
+('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
 ('notification_configuration', jsonfield.fields.JSONField(default=dict)),
 ('created_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
 ('modified_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -381,7 +381,7 @@ class Migration(migrations.Migration):
 ('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)),
 ('members', models.ManyToManyField(related_name='roles', to=settings.AUTH_USER_MODEL)),
 ('parents', models.ManyToManyField(related_name='children', to='main.Role')),
-('implicit_parents', models.TextField(default=b'[]')),
+('implicit_parents', models.TextField(default='[]')),
 ('content_type', models.ForeignKey(default=None, to='contenttypes.ContentType', null=True)),
 ('object_id', models.PositiveIntegerField(default=None, null=True)),

@@ -422,122 +422,122 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='credential',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='credential',
 name='use_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='credential',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='custominventoryscript',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='custominventoryscript',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'organization.member_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='inventory',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='inventory',
 name='adhoc_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='inventory',
 name='update_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='inventory',
 name='use_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'adhoc_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='adhoc_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='inventory',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'update_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'update_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='jobtemplate',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.admin_role', b'inventory.organization.admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.admin_role', 'inventory.organization.admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='jobtemplate',
 name='execute_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='jobtemplate',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.auditor_role', b'inventory.organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='organization',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='organization',
 name='auditor_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_auditor', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_auditor', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='organization',
 name='member_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='organization',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'member_role', b'auditor_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['member_role', 'auditor_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='project',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'singleton:system_administrator'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'singleton:system_administrator'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='project',
 name='use_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='project',
 name='update_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='project',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'singleton:system_auditor', b'use_role', b'update_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'singleton:system_auditor', 'use_role', 'update_role'], to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='team',
 name='admin_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='team',
 name='member_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null='True'),
 ),
 migrations.AddField(
 model_name='team',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role', b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role', 'organization.auditor_role', 'member_role'], to='main.Role', null='True'),
 ),

 # System Job Templates
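
Note on the role-field hunks above: only the literal type changes; parent_role paths keep their values, and the oddly string-valued null='True' stays a string. A sketch, assuming these arguments are only ever tested for truthiness inside ImplicitRoleField (an assumption, not confirmed by this diff):

# Sketch (illustrative): 'True' is a non-empty string, so truthiness-based
# handling treats null='True' and null=b'True' identically; the diff changes
# the type, not the behavior.
assert bool('True') is True and bool(b'True') is True
assert 'True' != b'True'   # on Python 3 the two values still differ by type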
@@ -545,18 +545,18 @@ class Migration(migrations.Migration):
 migrations.AlterField(
 model_name='systemjob',
 name='job_type',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
 ),
 migrations.AlterField(
 model_name='systemjobtemplate',
 name='job_type',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
 ),
 # Credential domain field
 migrations.AddField(
 model_name='credential',
 name='domain',
-field=models.CharField(default=b'', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True),
+field=models.CharField(default='', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True),
 ),
 # Create Labels
 migrations.CreateModel(
@@ -565,7 +565,7 @@ class Migration(migrations.Migration):
 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
 ('created', models.DateTimeField(default=None, editable=False)),
 ('modified', models.DateTimeField(default=None, editable=False)),
-('description', models.TextField(default=b'', blank=True)),
+('description', models.TextField(default='', blank=True)),
 ('name', models.CharField(max_length=512)),
 ('created_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
 ('modified_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
@@ -625,7 +625,7 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='credential',
 name='authorize_password',
-field=models.CharField(default=b'', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True),
 ),
 migrations.AlterField(
 model_name='credential',
@@ -640,17 +640,17 @@ class Migration(migrations.Migration):
 migrations.AlterField(
 model_name='credential',
 name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')]),
 ),
 migrations.AlterField(
 model_name='inventorysource',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 migrations.AlterField(
 model_name='inventoryupdate',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 migrations.AlterField(
 model_name='team',
@@ -702,41 +702,41 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='credential',
 name='client',
-field=models.CharField(default=b'', help_text='Client Id or Application Id for the credential', max_length=128, blank=True),
+field=models.CharField(default='', help_text='Client Id or Application Id for the credential', max_length=128, blank=True),
 ),
 migrations.AddField(
 model_name='credential',
 name='secret',
-field=models.CharField(default=b'', help_text='Secret Token for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Secret Token for this credential', max_length=1024, blank=True),
 ),
 migrations.AddField(
 model_name='credential',
 name='subscription',
-field=models.CharField(default=b'', help_text='Subscription identifier for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Subscription identifier for this credential', max_length=1024, blank=True),
 ),
 migrations.AddField(
 model_name='credential',
 name='tenant',
-field=models.CharField(default=b'', help_text='Tenant identifier for this credential', max_length=1024, blank=True),
+field=models.CharField(default='', help_text='Tenant identifier for this credential', max_length=1024, blank=True),
 ),
 migrations.AlterField(
 model_name='credential',
 name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
 ),
 migrations.AlterField(
 model_name='host',
 name='instance_id',
-field=models.CharField(default=b'', max_length=1024, blank=True),
+field=models.CharField(default='', max_length=1024, blank=True),
 ),
 migrations.AlterField(
 model_name='inventorysource',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 migrations.AlterField(
 model_name='inventoryupdate',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 ]
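
Note: each of these files is a squashed migration; its replaces list names the historical (app_label, migration_name) pairs it supersedes, and the hunks convert the app label from b'main' to 'main'. A minimal sketch of the standard Django pattern, reusing names from the hunks above:

from django.db import migrations

class Migration(migrations.Migration):
    # Django records each (app_label, name) pair listed here as applied when
    # the squashed migration runs; on Python 3 the app label must be str.
    replaces = [
        ('main', '0020_v300_labels_changes'),
        ('main', '0021_v300_activity_stream'),
    ]
    dependencies = []
    operations = []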
@@ -9,20 +9,20 @@ from django.db import migrations, models
 from django.conf import settings
 import awx.main.fields

-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30


 class Migration(migrations.Migration):
-    replaces = [(b'main', '0020_v300_labels_changes'),
-                (b'main', '0021_v300_activity_stream'),
-                (b'main', '0022_v300_adhoc_extravars'),
-                (b'main', '0023_v300_activity_stream_ordering'),
-                (b'main', '0024_v300_jobtemplate_allow_simul'),
-                (b'main', '0025_v300_update_rbac_parents'),
-                (b'main', '0026_v300_credential_unique'),
-                (b'main', '0027_v300_team_migrations'),
-                (b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
+    replaces = [('main', '0020_v300_labels_changes'),
+                ('main', '0021_v300_activity_stream'),
+                ('main', '0022_v300_adhoc_extravars'),
+                ('main', '0023_v300_activity_stream_ordering'),
+                ('main', '0024_v300_jobtemplate_allow_simul'),
+                ('main', '0025_v300_update_rbac_parents'),
+                ('main', '0026_v300_credential_unique'),
+                ('main', '0027_v300_team_migrations'),
+                ('main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)

 dependencies = [
 migrations.swappable_dependency(settings.AUTH_USER_MODEL),
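
Note: the import hunk above also fixes a Python 3 incompatibility: implicit relative imports were removed, so sibling modules inside the migrations package must be imported with the explicit relative form. Sketch (only meaningful when run from inside that package):

# Python 2 only (implicit relative import, breaks on Python 3):
#   import _squashed
#   from _squashed_30 import SQUASHED_30
# Works on both Python 2 (with absolute_import) and Python 3, as the diff does:
from . import _squashed
from ._squashed_30 import SQUASHED_30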
@@ -63,22 +63,22 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='adhoccommand',
 name='extra_vars',
-field=models.TextField(default=b'', blank=True),
+field=models.TextField(default='', blank=True),
 ),
 migrations.AlterField(
 model_name='credential',
 name='kind',
-field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
+field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
 ),
 migrations.AlterField(
 model_name='inventorysource',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 migrations.AlterField(
 model_name='inventoryupdate',
 name='source',
-field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
 ),
 # jobtemplate allow simul
 migrations.AddField(
@@ -90,17 +90,17 @@ class Migration(migrations.Migration):
 migrations.AlterField(
 model_name='credential',
 name='use_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'admin_role'], to='main.Role', null='True'),
 ),
 migrations.AlterField(
 model_name='team',
 name='member_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
 ),
 migrations.AlterField(
 model_name='team',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'member_role'], to='main.Role', null='True'),
 ),
 # Unique credential
 migrations.AlterUniqueTogether(
@@ -110,7 +110,7 @@ class Migration(migrations.Migration):
 migrations.AlterField(
 model_name='credential',
 name='read_role',
-field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
 ),
 # Team cascade
 migrations.AlterField(
@@ -8,8 +8,8 @@ import django.db.models.deletion
 import awx.main.models.workflow
 import awx.main.fields

-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30


 class Migration(migrations.Migration):
@@ -19,7 +19,7 @@ class Migration(migrations.Migration):
 ]

 replaces = _squashed.replaces(SQUASHED_30) + [
-(b'main', '0034_v310_release'),
+('main', '0034_v310_release'),
 ]

 operations = _squashed.operations(SQUASHED_30) + [
@@ -42,13 +42,13 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='jobevent',
 name='uuid',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
 ),
 # Job Parent Event UUID
 migrations.AddField(
 model_name='jobevent',
 name='parent_uuid',
-field=models.CharField(default=b'', max_length=1024, editable=False),
+field=models.CharField(default='', max_length=1024, editable=False),
 ),
 # Modify the HA Instance
 migrations.RemoveField(
@@ -63,19 +63,19 @@ class Migration(migrations.Migration):
 migrations.AlterField(
 model_name='credential',
 name='become_method',
-field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun')]),
+field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun')]),
 ),
 # Add Workflows
 migrations.AlterField(
 model_name='unifiedjob',
 name='launch_type',
-field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync')]),
+field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync')]),
 ),
 migrations.CreateModel(
 name='WorkflowJob',
 fields=[
 ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
-('extra_vars', models.TextField(default=b'', blank=True)),
+('extra_vars', models.TextField(default='', blank=True)),
 ],
 options={
 'ordering': ('id',),
|
||||
@@ -101,8 +101,8 @@ class Migration(migrations.Migration):
|
||||
name='WorkflowJobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True')),
|
||||
],
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
@@ -176,7 +176,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='execute_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
@@ -186,7 +186,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='read_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplatenode',
|
||||
@@ -216,7 +216,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplatenode',
|
||||
@@ -269,23 +269,23 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='execution_node',
|
||||
field=models.TextField(default=b'', editable=False, blank=True),
|
||||
field=models.TextField(default='', editable=False, blank=True),
|
||||
),
|
||||
# SCM Revision
|
||||
migrations.AddField(
|
||||
model_name='project',
|
||||
name='scm_revision',
|
||||
field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
|
||||
field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='job_type',
|
||||
field=models.CharField(default=b'check', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')]),
|
||||
field=models.CharField(default='check', max_length=64, choices=[('run', 'Run'), ('check', 'Check')]),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='scm_revision',
|
||||
field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
|
||||
field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
|
||||
),
|
||||
# Project Playbook Files
|
||||
migrations.AddField(
|
||||
@@ -307,12 +307,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='adhoccommandevent',
|
||||
name='stdout',
|
||||
field=models.TextField(default=b'', editable=False),
|
||||
field=models.TextField(default='', editable=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommandevent',
|
||||
name='uuid',
|
||||
field=models.CharField(default=b'', max_length=1024, editable=False),
|
||||
field=models.CharField(default='', max_length=1024, editable=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoccommandevent',
|
||||
@@ -327,7 +327,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='jobevent',
|
||||
name='playbook',
|
||||
field=models.CharField(default=b'', max_length=1024, editable=False),
|
||||
field=models.CharField(default='', max_length=1024, editable=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobevent',
|
||||
@@ -337,7 +337,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='jobevent',
|
||||
name='stdout',
|
||||
field=models.TextField(default=b'', editable=False),
|
||||
field=models.TextField(default='', editable=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobevent',
|
||||
@@ -352,7 +352,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='adhoccommandevent',
|
||||
name='event',
|
||||
field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]),
|
||||
field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='jobevent',
|
||||
@@ -362,7 +362,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='jobevent',
|
||||
name='event',
|
||||
field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]),
|
||||
field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='adhoccommandevent',
|
||||
@@ -505,7 +505,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='host',
|
||||
name='instance_id',
|
||||
field=models.CharField(default=b'', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True),
|
||||
field=models.CharField(default='', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
@@ -520,7 +520,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
@@ -535,7 +535,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='scm_url',
|
||||
field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
|
||||
field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
@@ -555,12 +555,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='projectupdate',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdate',
|
||||
name='scm_url',
|
||||
field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
|
||||
field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdate',
|
||||
@@ -600,7 +600,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='execution_node',
|
||||
field=models.TextField(default=b'', help_text='The Tower node the job executed on.', editable=False, blank=True),
|
||||
field=models.TextField(default='', help_text='The Tower node the job executed on.', editable=False, blank=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
@@ -610,7 +610,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='job_explanation',
|
||||
field=models.TextField(default=b'', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True),
|
||||
field=models.TextField(default='', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
|
||||
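Note: the import rewrite at the top of this file is the PEP 328 fix: Python 3 removed implicit relative imports, so a sibling module inside the migrations package must be named relative to the package. A sketch of the equivalent using importlib; the package path in the usage comment is illustrative, not taken from this diff.

    import importlib

    def load_squashed(package: str):
        # Equivalent of `from . import _squashed` executed inside `package`.
        # The old `import _squashed` relied on implicit relative imports,
        # which Python 3 removed (PEP 328).
        return importlib.import_module('._squashed', package=package)

    # squashed = load_squashed('awx.main.migrations')  # illustrative path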
@@ -2,8 +2,8 @@
 from __future__ import unicode_literals
 from django.db import migrations

-import _squashed
-from _squashed_31 import SQUASHED_31
+from . import _squashed
+from ._squashed_31 import SQUASHED_31


 class Migration(migrations.Migration):
@@ -72,7 +72,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='inventory',
             name='kind',
-            field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the host_filter property.')]),
+            field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
         ),
         migrations.CreateModel(
             name='SmartInventoryMembership',
@@ -143,7 +143,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='inventorysource',
             name='scm_last_revision',
-            field=models.CharField(default=b'', max_length=1024, editable=False, blank=True),
+            field=models.CharField(default='', max_length=1024, editable=False, blank=True),
         ),
         migrations.AddField(
             model_name='inventorysource',
@@ -163,27 +163,27 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='inventorysource',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventoryupdate',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventorysource',
             name='source_path',
-            field=models.CharField(default=b'', max_length=1024, blank=True),
+            field=models.CharField(default='', max_length=1024, blank=True),
         ),
         migrations.AlterField(
             model_name='inventoryupdate',
             name='source_path',
-            field=models.CharField(default=b'', max_length=1024, blank=True),
+            field=models.CharField(default='', max_length=1024, blank=True),
         ),
         migrations.AlterField(
             model_name='unifiedjob',
             name='launch_type',
-            field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync'), (b'scm', 'SCM Update')]),
+            field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')]),
         ),
         migrations.AddField(
             model_name='inventorysource',
@@ -211,12 +211,12 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='inventorysource',
             name='verbosity',
-            field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]),
+            field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]),
         ),
         migrations.AddField(
             model_name='inventoryupdate',
             name='verbosity',
-            field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]),
+            field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]),
         ),

         # Job Templates
@@ -317,7 +317,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='inventory',
             name='kind',
-            field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the host_filter property.')]),
+            field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
         ),

         # Timeout help text update
@@ -378,9 +378,9 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('description', models.TextField(default=b'', blank=True)),
+                ('description', models.TextField(default='', blank=True)),
                 ('name', models.CharField(max_length=512)),
-                ('kind', models.CharField(max_length=32, choices=[(b'ssh', 'Machine'), (b'vault', 'Vault'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'cloud', 'Cloud'), (b'insights', 'Insights')])),
+                ('kind', models.CharField(max_length=32, choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('insights', 'Insights')])),
                 ('managed_by_tower', models.BooleanField(default=False, editable=False)),
                 ('inputs', awx.main.fields.CredentialTypeInputField(default={}, blank=True, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
                 ('injectors', awx.main.fields.CredentialTypeInjectorField(default={}, blank=True, help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
@@ -435,7 +435,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='credential',
             name='become_method',
-            field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun'), (b'runas', 'Runas')]),
+            field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun'), ('runas', 'Runas')]),
         ),

         # Connecting activity stream
@@ -496,6 +496,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='unifiedjob',
             name='execution_node',
-            field=models.TextField(default=b'', help_text='The node the job executed on.', editable=False, blank=True),
+            field=models.TextField(default='', help_text='The node the job executed on.', editable=False, blank=True),
         ),
     ]
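Note: most of the churn in this file is the same b''-to-'' cleanup applied to choices lists. A standalone sketch (plain Python, not Django's validation itself) of the failure mode it prevents: on Python 3 a field's stored value is str, so it can never match a bytes choice key.

    CHOICES_PY2 = [(b'', 'Manual'), (b'git', 'Git'), (b'svn', 'Subversion')]
    CHOICES_PY3 = [('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion')]

    value = 'git'  # what a Python 3 form/API hands the model
    assert not any(value == key for key, _ in CHOICES_PY2)  # bytes keys never match
    assert any(value == key for key, _ in CHOICES_PY3)      # str keys match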
@@ -20,11 +20,11 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='inventorysource',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventoryupdate',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
         ),
     ]
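Note: diffs like this one are mechanical enough to script. A hypothetical cleanup helper (not part of this changeset) that strips b'' prefixes from the simple, identifier-like literals these migrations use; it assumes the literals contain no escaped quotes, which holds here.

    import re

    # Hypothetical helper, not part of this changeset.
    BYTES_LITERAL = re.compile(r"\bb'([^']*)'")

    def de_bytes(source: str) -> str:
        # Rewrite b'...' literals as plain '...' literals.
        return BYTES_LITERAL.sub(r"'\1'", source)

    print(de_bytes("parent_role=[b'admin_role'], null=b'True'"))
    # parent_role=['admin_role'], null='True'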
@@ -21,9 +21,9 @@ class Migration(migrations.Migration):
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('event_data', awx.main.fields.JSONField(blank=True, default={})),
-                ('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
+                ('uuid', models.CharField(default='', editable=False, max_length=1024)),
                 ('counter', models.PositiveIntegerField(default=0, editable=False)),
-                ('stdout', models.TextField(default=b'', editable=False)),
+                ('stdout', models.TextField(default='', editable=False)),
                 ('verbosity', models.PositiveIntegerField(default=0, editable=False)),
                 ('start_line', models.PositiveIntegerField(default=0, editable=False)),
                 ('end_line', models.PositiveIntegerField(default=0, editable=False)),
@@ -39,17 +39,17 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('event', models.CharField(choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')], max_length=100)),
+                ('event', models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100)),
                 ('event_data', awx.main.fields.JSONField(blank=True, default={})),
                 ('failed', models.BooleanField(default=False, editable=False)),
                 ('changed', models.BooleanField(default=False, editable=False)),
-                ('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
-                ('playbook', models.CharField(default=b'', editable=False, max_length=1024)),
-                ('play', models.CharField(default=b'', editable=False, max_length=1024)),
-                ('role', models.CharField(default=b'', editable=False, max_length=1024)),
-                ('task', models.CharField(default=b'', editable=False, max_length=1024)),
+                ('uuid', models.CharField(default='', editable=False, max_length=1024)),
+                ('playbook', models.CharField(default='', editable=False, max_length=1024)),
+                ('play', models.CharField(default='', editable=False, max_length=1024)),
+                ('role', models.CharField(default='', editable=False, max_length=1024)),
+                ('task', models.CharField(default='', editable=False, max_length=1024)),
                 ('counter', models.PositiveIntegerField(default=0, editable=False)),
-                ('stdout', models.TextField(default=b'', editable=False)),
+                ('stdout', models.TextField(default='', editable=False)),
                 ('verbosity', models.PositiveIntegerField(default=0, editable=False)),
                 ('start_line', models.PositiveIntegerField(default=0, editable=False)),
                 ('end_line', models.PositiveIntegerField(default=0, editable=False)),
@@ -66,9 +66,9 @@ class Migration(migrations.Migration):
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('event_data', awx.main.fields.JSONField(blank=True, default={})),
-                ('uuid', models.CharField(default=b'', editable=False, max_length=1024)),
+                ('uuid', models.CharField(default='', editable=False, max_length=1024)),
                 ('counter', models.PositiveIntegerField(default=0, editable=False)),
-                ('stdout', models.TextField(default=b'', editable=False)),
+                ('stdout', models.TextField(default='', editable=False)),
                 ('verbosity', models.PositiveIntegerField(default=0, editable=False)),
                 ('start_line', models.PositiveIntegerField(default=0, editable=False)),
                 ('end_line', models.PositiveIntegerField(default=0, editable=False)),
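Note: the event-table hunks above all make the same fix: default=b'' becomes default=''. A plain-Python sketch of the type mismatch being removed; on Python 3 a CharField/TextField's native type is str, while b'' is bytes.

    assert isinstance('', str)
    assert not isinstance(b'', str)  # bytes is not str on Python 3
    assert b'' != ''                 # the bytes default never equals the str empty string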
@@ -18,77 +18,77 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='organization',
             name='execute_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
-            field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='credential_admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='inventory_admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='project_admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='workflow_admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AddField(
             model_name='organization',
             name='notification_admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='credential',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='inventory',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='project',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.project_admin_role', 'singleton:system_administrator'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='workflowjobtemplate',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='workflowjobtemplate',
             name='execute_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='jobtemplate',
             name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.job_template_admin_role', b'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='jobtemplate',
             name='execute_role',
-            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], related_name='+', to='main.Role'),
         ),
         migrations.AlterField(
             model_name='organization',
             name='member_role',
-            field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
+            field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
         ),

     ]
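Note: the dotted parent_role strings such as 'organization.inventory_admin_role' name a path from the owning object to an ancestor role. A toy resolver illustrating the idea only; the real traversal lives in awx.main.fields.ImplicitRoleField and is more involved (it also handles 'singleton:...' names), so treat every name here as an assumption.

    def resolve_parent_role(obj, path):
        # Toy illustration: walk the dotted path attribute by attribute,
        # stopping if any intermediate link is unset.
        target = obj
        for attr in path.split('.'):
            target = getattr(target, attr, None)
            if target is None:
                return None
        return target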
@@ -35,8 +35,8 @@ class Migration(migrations.Migration):
                 ('skip_authorization', models.BooleanField(default=False)),
                 ('created', models.DateTimeField(auto_now_add=True)),
                 ('updated', models.DateTimeField(auto_now=True)),
-                ('description', models.TextField(blank=True, default=b'')),
-                ('logo_data', models.TextField(default=b'', editable=False, validators=[django.core.validators.RegexValidator(re.compile(b'.*'))])),
+                ('description', models.TextField(blank=True, default='')),
+                ('logo_data', models.TextField(default='', editable=False, validators=[django.core.validators.RegexValidator(re.compile('.*'))])),
                 ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL)),
             ],
             options={
@@ -52,7 +52,7 @@ class Migration(migrations.Migration):
                 ('scope', models.TextField(blank=True)),
                 ('created', models.DateTimeField(auto_now_add=True)),
                 ('updated', models.DateTimeField(auto_now=True)),
-                ('description', models.CharField(blank=True, default=b'', max_length=200)),
+                ('description', models.CharField(blank=True, default='', max_length=200)),
                 ('last_used', models.DateTimeField(default=None, editable=False, null=True)),
                 ('application', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL)),
                 ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL)),
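Note: the logo_data validator change is more than cosmetic: on Python 3, a pattern compiled from bytes cannot be applied to str data at all. A standalone sketch:

    import re

    assert re.compile('.*').match('logo data')  # str pattern on str input: fine
    try:
        re.compile(b'.*').match('logo data')    # bytes pattern on str input
    except TypeError as exc:
        print(exc)  # cannot use a bytes pattern on a string-like object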
Some files were not shown because too many files have changed in this diff.