Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 03:54:44 -03:30)
Compare commits
645 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b23856f126 | ||
|
|
c062728359 | ||
|
|
85d185cc8b | ||
|
|
77b8f345ae | ||
|
|
0052967aee | ||
|
|
63e9aed601 | ||
|
|
d4be8c8168 | ||
|
|
cdf4b0d1ed | ||
|
|
c43a59e475 | ||
|
|
014520ee2b | ||
|
|
c1abc56753 | ||
|
|
c5b4681bf4 | ||
|
|
00b7d6571a | ||
|
|
9ed2534ac5 | ||
|
|
a3bc3986bb | ||
|
|
230933744c | ||
|
|
227a90006e | ||
|
|
112f89660b | ||
|
|
a0910eb6de | ||
|
|
5433af6716 | ||
|
|
9744b89737 | ||
|
|
04c535e3f9 | ||
|
|
259e53f59d | ||
|
|
ac9bf1afcf | ||
|
|
4b62d77015 | ||
|
|
ef5ce0b082 | ||
|
|
1942be7dc3 | ||
|
|
210f9577b0 | ||
|
|
87a05a5b2e | ||
|
|
f8a754cf44 | ||
|
|
3ea37e1c79 | ||
|
|
c997fcfc2c | ||
|
|
4dd4928aab | ||
|
|
5aeaabaceb | ||
|
|
0d4e6d7e0b | ||
|
|
9ae038868c | ||
|
|
0b4ae74698 | ||
|
|
0d248a12bc | ||
|
|
7396e2e7ac | ||
|
|
cac3bece56 | ||
|
|
e4145b580c | ||
|
|
74076b99d6 | ||
|
|
5d35506b0c | ||
|
|
e646b46a2c | ||
|
|
475e2605d4 | ||
|
|
c16ad89ff9 | ||
|
|
425d1168b9 | ||
|
|
7ceaa9ec4a | ||
|
|
4b3d3537b4 | ||
|
|
efbff24528 | ||
|
|
1d9ce6cc15 | ||
|
|
794ce96b17 | ||
|
|
181421a2ee | ||
|
|
9c9496a683 | ||
|
|
2b111c81df | ||
|
|
f467e26842 | ||
|
|
7700050d10 | ||
|
|
a8d34b46fb | ||
|
|
bf6c16197c | ||
|
|
25cc341888 | ||
|
|
d899e75ad7 | ||
|
|
732da52239 | ||
|
|
ab2f212b04 | ||
|
|
f94438cf9b | ||
|
|
2569ec4f4f | ||
|
|
b58bff4686 | ||
|
|
6fab3590ae | ||
|
|
846fd67618 | ||
|
|
6254129f0d | ||
|
|
3409d39150 | ||
|
|
9de165a676 | ||
|
|
f54616912d | ||
|
|
c003e89ea9 | ||
|
|
6e64b5c070 | ||
|
|
fcfc34fef1 | ||
|
|
75b7d74f91 | ||
|
|
569b5bc533 | ||
|
|
9ab9c6961b | ||
|
|
2e525f8922 | ||
|
|
9c6300c2de | ||
|
|
f8153393b1 | ||
|
|
cb07e9c757 | ||
|
|
639b297027 | ||
|
|
4341d67fb0 | ||
|
|
6260633974 | ||
|
|
8ec856f3b6 | ||
|
|
5a207f155e | ||
|
|
2a722ba8d0 | ||
|
|
efbd2177a5 | ||
|
|
989e1ca5d6 | ||
|
|
4c89568d71 | ||
|
|
a9688ac805 | ||
|
|
12a8793ddb | ||
|
|
7bbf640389 | ||
|
|
cb6688c685 | ||
|
|
266a4e71c5 | ||
|
|
c29afce54d | ||
|
|
eddee456b3 | ||
|
|
be5a12a318 | ||
|
|
e131e8c151 | ||
|
|
d30ecaa7e3 | ||
|
|
dfc4a0c0e0 | ||
|
|
86ba1639c3 | ||
|
|
f1b4e24833 | ||
|
|
35d36a71c7 | ||
|
|
eadcbe1ce9 | ||
|
|
f0198105c4 | ||
|
|
e34c7acdc4 | ||
|
|
001d469bd0 | ||
|
|
a9e5981cfe | ||
|
|
b36b6978fb | ||
|
|
55a19ffe6a | ||
|
|
c4d358b870 | ||
|
|
5ae7df7757 | ||
|
|
220168f5ee | ||
|
|
3cc9139c6d | ||
|
|
01161c7afd | ||
|
|
6d595cbda6 | ||
|
|
419d32d3e3 | ||
|
|
334c63388b | ||
|
|
32f6f87463 | ||
|
|
0d92b2e703 | ||
|
|
bc6d879976 | ||
|
|
9bae9d32c7 | ||
|
|
b5724adae5 | ||
|
|
1048baa98c | ||
|
|
922ea67541 | ||
|
|
3d105e3b7a | ||
|
|
aceef98601 | ||
|
|
d41322c63c | ||
|
|
d6e5eb356b | ||
|
|
b46a2b43b0 | ||
|
|
6f54044cc6 | ||
|
|
5d1f322cd1 | ||
|
|
c11a8b8ae1 | ||
|
|
2d4df3d50e | ||
|
|
715483c669 | ||
|
|
30f65f38a7 | ||
|
|
aaf093b0e0 | ||
|
|
bd7248d21c | ||
|
|
9bdd49bec5 | ||
|
|
2506db88f2 | ||
|
|
61c38eabf8 | ||
|
|
37a1e5d9b0 | ||
|
|
c439a1ec8f | ||
|
|
a1d110aac7 | ||
|
|
c5e22f9aa3 | ||
|
|
6dc5f91a0f | ||
|
|
8a9ebe2086 | ||
|
|
a859ecfbde | ||
|
|
02fd26520d | ||
|
|
f8b2bcbae7 | ||
|
|
f5157784c4 | ||
|
|
93b49f314d | ||
|
|
55d81cf74d | ||
|
|
f629822596 | ||
|
|
bf2a4d1a2c | ||
|
|
afadfa939d | ||
|
|
02c3e1c32f | ||
|
|
a0d20a5d50 | ||
|
|
b8d27d53b8 | ||
|
|
878659cded | ||
|
|
027ce7fbdb | ||
|
|
540f8ab7d6 | ||
|
|
0362c88e48 | ||
|
|
129374a1c2 | ||
|
|
63fd546f44 | ||
|
|
9856c9154e | ||
|
|
e7a712394a | ||
|
|
208e36f83b | ||
|
|
68a6984fcd | ||
|
|
a90e0e8834 | ||
|
|
8ab6a79b37 | ||
|
|
e68d576fd2 | ||
|
|
ca247182df | ||
|
|
1f628778bb | ||
|
|
dcbb2813b5 | ||
|
|
9cdb281f06 | ||
|
|
8116ec8e1f | ||
|
|
c373420982 | ||
|
|
93a9a0354f | ||
|
|
ee6e28e066 | ||
|
|
ea5d429399 | ||
|
|
3b49dd78bf | ||
|
|
42b019d8c8 | ||
|
|
a1af4e1808 | ||
|
|
ffdcb2f8eb | ||
|
|
7b5f4f51fb | ||
|
|
25c2b9610a | ||
|
|
5935583c4c | ||
|
|
68f17eb370 | ||
|
|
1ad8a49155 | ||
|
|
47ed5ef848 | ||
|
|
a56686ca77 | ||
|
|
211786976d | ||
|
|
7e82f0fad7 | ||
|
|
8612bf79e8 | ||
|
|
78edf51803 | ||
|
|
632810f3a8 | ||
|
|
695eab1fdd | ||
|
|
081a0fc04e | ||
|
|
48f10669d6 | ||
|
|
4f8b624b96 | ||
|
|
c87c0aa712 | ||
|
|
05e6f4ab3c | ||
|
|
1a85874964 | ||
|
|
6f2224c8e5 | ||
|
|
57e155f0f9 | ||
|
|
a6924c1bcf | ||
|
|
4acb28f6f5 | ||
|
|
3ed5d6ec65 | ||
|
|
15bcea7301 | ||
|
|
ce8c0066d0 | ||
|
|
bdd63f36a8 | ||
|
|
24abc1462f | ||
|
|
12363ae175 | ||
|
|
1b50895738 | ||
|
|
1fbae00e37 | ||
|
|
b10a71786b | ||
|
|
0d659b0111 | ||
|
|
deb8714987 | ||
|
|
ee8775a08d | ||
|
|
31650bb0bd | ||
|
|
cbf085ab43 | ||
|
|
78d715efed | ||
|
|
2cb5b0563b | ||
|
|
0eb55f5038 | ||
|
|
daf3bbc7ef | ||
|
|
caa6d0c4d3 | ||
|
|
bc7ae4ca46 | ||
|
|
db2316b791 | ||
|
|
b7efd5a9ab | ||
|
|
83caf99c58 | ||
|
|
285fb2582e | ||
|
|
19180a1bc4 | ||
|
|
9c86f521e9 | ||
|
|
2171823846 | ||
|
|
c4143b0111 | ||
|
|
94fa4deab3 | ||
|
|
53aadd3b96 | ||
|
|
faa0802d97 | ||
|
|
fa144aa98f | ||
|
|
ea4e98c52a | ||
|
|
8ff413efc0 | ||
|
|
804a3c17bf | ||
|
|
da5eb710cd | ||
|
|
57f9b31b2b | ||
|
|
34ba858e3b | ||
|
|
5e24cee0ae | ||
|
|
a026838f77 | ||
|
|
e2cd86089b | ||
|
|
85d5387f31 | ||
|
|
cf13a1b70a | ||
|
|
fed6a86170 | ||
|
|
c8907fb39d | ||
|
|
162e4aeec4 | ||
|
|
c6d2fa86c7 | ||
|
|
f89db3586b | ||
|
|
19742859b6 | ||
|
|
eff46dbc71 | ||
|
|
80b75a163a | ||
|
|
fe65073f3e | ||
|
|
6f2b10daf5 | ||
|
|
ad3d89afd3 | ||
|
|
e3c2c310ef | ||
|
|
c574cdc7dc | ||
|
|
097b59e74a | ||
|
|
8e7d607a47 | ||
|
|
4c32faa448 | ||
|
|
17509d560d | ||
|
|
7b1b656455 | ||
|
|
268b22c550 | ||
|
|
b525d0a6f4 | ||
|
|
c7cabfa785 | ||
|
|
9c2797b34c | ||
|
|
732f7d2292 | ||
|
|
f5fc0871fc | ||
|
|
9458741b72 | ||
|
|
a1f7f967e3 | ||
|
|
91c78d7137 | ||
|
|
b88f4ce27c | ||
|
|
e8606d9478 | ||
|
|
90d38a50de | ||
|
|
a83164cca6 | ||
|
|
e1e7e9047d | ||
|
|
094eef635d | ||
|
|
56bb82e303 | ||
|
|
0290dd3246 | ||
|
|
de8c46cab0 | ||
|
|
9028a48ab2 | ||
|
|
709fa74070 | ||
|
|
5342faa997 | ||
|
|
f0865d69f0 | ||
|
|
ddf9fd581e | ||
|
|
b6745db4b8 | ||
|
|
35a565d09f | ||
|
|
b878aed400 | ||
|
|
1961a8ba15 | ||
|
|
b76018d6e0 | ||
|
|
a40398e6a1 | ||
|
|
97e2fbbe27 | ||
|
|
bcbad06c10 | ||
|
|
1c74773eac | ||
|
|
9701ac1804 | ||
|
|
3d90c6dfcf | ||
|
|
1402a2c8a5 | ||
|
|
6567ad612c | ||
|
|
a15bf9ee41 | ||
|
|
da448f6a0b | ||
|
|
513f54a422 | ||
|
|
05d9220b21 | ||
|
|
9bb9bc682f | ||
|
|
1d6f116687 | ||
|
|
9a9d53d17a | ||
|
|
755ffc9844 | ||
|
|
0ffbb06427 | ||
|
|
c13c5b6c13 | ||
|
|
70979df36a | ||
|
|
83ee39cabd | ||
|
|
b0d31a64aa | ||
|
|
06c53c14be | ||
|
|
a63778e40e | ||
|
|
6f38edf9a3 | ||
|
|
1f05372ac9 | ||
|
|
d0327fc044 | ||
|
|
068dab14d4 | ||
|
|
f64d0dde5a | ||
|
|
7cc0041aa8 | ||
|
|
f66f24eb83 | ||
|
|
e3ee3c5a00 | ||
|
|
1198c067b2 | ||
|
|
d3ea09d60c | ||
|
|
c0abb063f9 | ||
|
|
95cdddd670 | ||
|
|
d91aa8c6cf | ||
|
|
052f101a70 | ||
|
|
c96e88877f | ||
|
|
1564dfc80f | ||
|
|
b0cb3ca9da | ||
|
|
84b5fb89a3 | ||
|
|
5319659d58 | ||
|
|
5d27c28b47 | ||
|
|
68a6315626 | ||
|
|
8bfbd85cf9 | ||
|
|
f7b6d9fdff | ||
|
|
ab4fba7ce9 | ||
|
|
deb6e58397 | ||
|
|
4746bc7c09 | ||
|
|
823a74c98b | ||
|
|
c294a63f32 | ||
|
|
84bce530dc | ||
|
|
6acd3c98b7 | ||
|
|
1e80b2e295 | ||
|
|
e4721d7722 | ||
|
|
0cea8121bb | ||
|
|
eaac54040c | ||
|
|
763ac25b2e | ||
|
|
922723cf39 | ||
|
|
f216c8f90f | ||
|
|
4e31bdd2d2 | ||
|
|
d5e9716ceb | ||
|
|
01963b0ee7 | ||
|
|
a353f2a807 | ||
|
|
69205c5f6b | ||
|
|
941bba2ae0 | ||
|
|
ddccfaa6fe | ||
|
|
24da2b78b8 | ||
|
|
6fee0db17b | ||
|
|
b25fbc5266 | ||
|
|
e6235a4046 | ||
|
|
0f32161df0 | ||
|
|
b570c8ad2a | ||
|
|
9170aa184a | ||
|
|
68c26014cc | ||
|
|
f049b61460 | ||
|
|
458ca69405 | ||
|
|
8a4c85e473 | ||
|
|
09d883f94a | ||
|
|
9ef57ec510 | ||
|
|
5be006f9d3 | ||
|
|
089bafa5d4 | ||
|
|
fa278f83ad | ||
|
|
0d68ca8f14 | ||
|
|
2ec90f17d0 | ||
|
|
ecf340f722 | ||
|
|
713079bd70 | ||
|
|
d77040a7a9 | ||
|
|
d3b137fbc4 | ||
|
|
857faf570d | ||
|
|
5246c842b2 | ||
|
|
1dca4c9098 | ||
|
|
8cb32045f0 | ||
|
|
4962b729de | ||
|
|
ed39a127e7 | ||
|
|
c4b4a4c21a | ||
|
|
bd81fda05c | ||
|
|
83550eeba0 | ||
|
|
4540cb653e | ||
|
|
69597c5654 | ||
|
|
fa61aef194 | ||
|
|
871d87374b | ||
|
|
e35f6b2acb | ||
|
|
b3e056fe55 | ||
|
|
a8140e86d7 | ||
|
|
e5b76c6427 | ||
|
|
4d4ae84e32 | ||
|
|
ae349addfe | ||
|
|
31fdd5e85c | ||
|
|
e4bde24f38 | ||
|
|
9c019e1cc0 | ||
|
|
b3d298269b | ||
|
|
21f7ca21e0 | ||
|
|
43bf370f8c | ||
|
|
6057921e34 | ||
|
|
d645d0894a | ||
|
|
4575cae458 | ||
|
|
6982a8aee7 | ||
|
|
fa1091d089 | ||
|
|
5095816762 | ||
|
|
c605705b39 | ||
|
|
24eae09ed9 | ||
|
|
a2fee252f9 | ||
|
|
ab80c2276d | ||
|
|
f78c9f357d | ||
|
|
da1e43dc12 | ||
|
|
ccc2a616c1 | ||
|
|
51184ba20d | ||
|
|
db33c0e4fa | ||
|
|
e9728f2a78 | ||
|
|
5cdf2f88da | ||
|
|
93e940adfc | ||
|
|
64776f97cf | ||
|
|
fc080732d4 | ||
|
|
d02364a833 | ||
|
|
176da040d9 | ||
|
|
f2b4d87152 | ||
|
|
17798edbc4 | ||
|
|
c1da74cbc0 | ||
|
|
5e6ee4a371 | ||
|
|
288fea8960 | ||
|
|
dca9daf719 | ||
|
|
634504c7a1 | ||
|
|
c019d873b9 | ||
|
|
e4a21b67c7 | ||
|
|
2e6c484a50 | ||
|
|
f8b64f2222 | ||
|
|
6060b62acd | ||
|
|
0dcf6a2b1f | ||
|
|
452c1b53f7 | ||
|
|
cb354c2ef1 | ||
|
|
42d2f72683 | ||
|
|
57e8ba7f3c | ||
|
|
c882cda586 | ||
|
|
784d18705c | ||
|
|
36996584f9 | ||
|
|
0160dbe8bc | ||
|
|
28994d4b0b | ||
|
|
9b09344bae | ||
|
|
84ba383199 | ||
|
|
6dcd87afec | ||
|
|
243ab58902 | ||
|
|
6c877a15e3 | ||
|
|
2ccf0a0004 | ||
|
|
c69db02762 | ||
|
|
59e1c6d492 | ||
|
|
35c27c8b16 | ||
|
|
91edac0d84 | ||
|
|
ae1bd9d1e9 | ||
|
|
cf168b27d2 | ||
|
|
8cb7b388dc | ||
|
|
171f0d6340 | ||
|
|
aff31ac02f | ||
|
|
a23754897e | ||
|
|
3094b67664 | ||
|
|
98d3f3dc8a | ||
|
|
6f2a07a7df | ||
|
|
54ac1905b3 | ||
|
|
1bdae2d1f7 | ||
|
|
2bc2e26cc7 | ||
|
|
5010602e6b | ||
|
|
c103a813bf | ||
|
|
e097bc61c8 | ||
|
|
2ea63eeca0 | ||
|
|
52336c0fe8 | ||
|
|
220354241b | ||
|
|
1ae8fdc15c | ||
|
|
4bbdce3478 | ||
|
|
d25e6249fd | ||
|
|
71d7bac261 | ||
|
|
acba5306c6 | ||
|
|
fca9245536 | ||
|
|
47031da65b | ||
|
|
b024d91c66 | ||
|
|
da7002cf0c | ||
|
|
f4f1762805 | ||
|
|
ad5857e06b | ||
|
|
12d735ec8f | ||
|
|
1e9173e8ef | ||
|
|
4809c40f3c | ||
|
|
4e9ec271c5 | ||
|
|
6cd6a42e20 | ||
|
|
f234c0f771 | ||
|
|
3f49d2c455 | ||
|
|
a0fb9bef3a | ||
|
|
ccaaee61f0 | ||
|
|
70269d9a0d | ||
|
|
ab6322a8f7 | ||
|
|
8bc6367e1e | ||
|
|
b74bf9f266 | ||
|
|
321aa3b01d | ||
|
|
7f1096f711 | ||
|
|
2b6cfd7b3d | ||
|
|
b2b33605cc | ||
|
|
d06b0de74b | ||
|
|
6dfc714c75 | ||
|
|
cf5d3d55f0 | ||
|
|
e91d383165 | ||
|
|
72d19b93a0 | ||
|
|
ff1c96b0e0 | ||
|
|
6aaf906594 | ||
|
|
da7baced50 | ||
|
|
2b10c0f3f2 | ||
|
|
01788263e2 | ||
|
|
8daceabd26 | ||
|
|
712b07c136 | ||
|
|
8fbfed5c55 | ||
|
|
c4a3c0aac1 | ||
|
|
4d0c567d73 | ||
|
|
365f897059 | ||
|
|
7b1158ee8e | ||
|
|
d8814b7162 | ||
|
|
9af3fa557b | ||
|
|
e0d8d35090 | ||
|
|
7e83ddc968 | ||
|
|
b48815d2bb | ||
|
|
ad383cdb44 | ||
|
|
bbbacd62ae | ||
|
|
91afa88b44 | ||
|
|
a6fd3d0c09 | ||
|
|
b575fa4243 | ||
|
|
edf0d4bf85 | ||
|
|
3cab73c574 | ||
|
|
b3af64d66f | ||
|
|
1869b73826 | ||
|
|
5ab09686c9 | ||
|
|
4ed4d85b91 | ||
|
|
cc47afa856 | ||
|
|
e066b688fc | ||
|
|
15111dd24a | ||
|
|
841975d72b | ||
|
|
31a96d20ab | ||
|
|
9a70ac88c0 | ||
|
|
2ec5dda1d8 | ||
|
|
dab80fb842 | ||
|
|
a6404bdd0d | ||
|
|
ee5199f77a | ||
|
|
7f409c6487 | ||
|
|
678ce81487 | ||
|
|
69e0f858bc | ||
|
|
2b12e26b98 | ||
|
|
634550fb0b | ||
|
|
491e4c709e | ||
|
|
480c8516ab | ||
|
|
9eda4efb74 | ||
|
|
a517b15c26 | ||
|
|
609528e8a3 | ||
|
|
e17ee4b58f | ||
|
|
3dc8a10e85 | ||
|
|
dc89479b4c | ||
|
|
e893017e00 | ||
|
|
b51b1a959f | ||
|
|
4a1c121792 | ||
|
|
8de92b152c | ||
|
|
95ab5327c3 | ||
|
|
d39ad9d9ce | ||
|
|
07a5e17284 | ||
|
|
583d1390d2 | ||
|
|
638f8eae21 | ||
|
|
e40f29092b | ||
|
|
b394862210 | ||
|
|
0434c611f0 | ||
|
|
201ae5f948 | ||
|
|
9d93b78296 | ||
|
|
1d7bd835e6 | ||
|
|
4f90406e91 | ||
|
|
53b4dd5dbf | ||
|
|
491f4824b0 | ||
|
|
91721e09df | ||
|
|
2828d31141 | ||
|
|
d10e727b3c | ||
|
|
f57cf03f4b | ||
|
|
8669e87454 | ||
|
|
b319f47048 | ||
|
|
432daa6139 | ||
|
|
835c26f6cb | ||
|
|
b2557c6fd8 | ||
|
|
f1c2a95f0d | ||
|
|
8f5d25a5df | ||
|
|
4c199b0ab2 | ||
|
|
ef7b3fec94 | ||
|
|
93bd1e6705 | ||
|
|
58e84a40e5 | ||
|
|
b13009b9a3 | ||
|
|
fc941eda98 | ||
|
|
32deca2e92 | ||
|
|
ff1a618a93 | ||
|
|
0af79b729e | ||
|
|
76711febd1 | ||
|
|
81e545b720 | ||
|
|
d985b1215a | ||
|
|
157bec1777 | ||
|
|
1754076a56 | ||
|
|
d3132820a5 | ||
|
|
9f4d65891c | ||
|
|
653ec0ffab | ||
|
|
28228a3b57 | ||
|
|
e2470200da | ||
|
|
9c04e08b4d | ||
|
|
cfd7946097 | ||
|
|
bda1abab8d | ||
|
|
8356327c2b | ||
|
|
fb67b8edf9 | ||
|
|
7af2bcc9b0 | ||
|
|
8e83c86d88 | ||
|
|
53cf6cf17c | ||
|
|
8701f83922 | ||
|
|
cafac2338d | ||
|
|
7344ee23ef | ||
|
|
e5dfc62dce | ||
|
|
a0bf3459eb | ||
|
|
facec0fe76 | ||
|
|
11edd43af3 | ||
|
|
6fb09d73b1 | ||
|
|
27d0111a27 | ||
|
|
58367811a0 | ||
|
|
a3519ce1df | ||
|
|
812d00f490 | ||
|
|
5ac2211ef4 | ||
|
|
9c9bf0ed84 | ||
|
|
c013d656c8 | ||
|
|
0c0e172caf | ||
|
|
e38ed6574c | ||
|
|
267e297eca | ||
|
|
a733a59b8d | ||
|
|
1f76a88656 |
1 .github/BOTMETA.yml (vendored)
@@ -1,3 +1,4 @@
---
files:
  awx/ui/:
    labels: component:ui
7 .gitignore (vendored)
@@ -135,9 +135,10 @@ use_dev_supervisor.txt

# Ansible module tests
awx_collection_test_venv/
awx_collection/*.tar.gz
awx_collection/galaxy.yml
/awx_collection_test_venv/
/awx_collection/*.tar.gz
/awx_collection/galaxy.yml
/sanity/

.idea/*
*.unison.tmp
12 .yamllint (new file)
@@ -0,0 +1,12 @@
---
ignore: |
  .tox
  awx/main/tests/data/inventory/plugins/**
  # vault files
  awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
  awx/ui/test/e2e/tests/smoke-vars.yml

extends: default

rules:
  line-length: disable
139 INSTALL.md
@@ -4,41 +4,45 @@ This document provides a guide for installing AWX.
|
||||
|
||||
## Table of contents
|
||||
|
||||
- [Getting started](#getting-started)
|
||||
- [Clone the repo](#clone-the-repo)
|
||||
- [AWX branding](#awx-branding)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [System Requirements](#system-requirements)
|
||||
- [AWX Tunables](#awx-tunables)
|
||||
- [Choose a deployment platform](#choose-a-deployment-platform)
|
||||
- [Official vs Building Images](#official-vs-building-images)
|
||||
- [OpenShift](#openshift)
|
||||
- [Prerequisites](#prerequisites-1)
|
||||
- [Deploying to Minishift](#deploying-to-minishift)
|
||||
- [Pre-build steps](#pre-build-steps)
|
||||
- [PostgreSQL](#postgresql)
|
||||
- [Start the build](#start-the-build)
|
||||
- [Post build](#post-build)
|
||||
- [Accessing AWX](#accessing-awx)
|
||||
- [Kubernetes](#kubernetes)
|
||||
- [Prerequisites](#prerequisites-2)
|
||||
- [Pre-build steps](#pre-build-steps-1)
|
||||
- [Configuring Helm](#configuring-helm)
|
||||
- [Start the build](#start-the-build-1)
|
||||
- [Accessing AWX](#accessing-awx-1)
|
||||
- [SSL Termination](#ssl-termination)
|
||||
- [Docker Compose](#docker-compose)
|
||||
- [Prerequisites](#prerequisites-3)
|
||||
- [Pre-build steps](#pre-build-steps-2)
|
||||
- [Deploying to a remote host](#deploying-to-a-remote-host)
|
||||
- [Inventory variables](#inventory-variables)
|
||||
- [Installing AWX](#installing-awx)
|
||||
* [Getting started](#getting-started)
|
||||
+ [Clone the repo](#clone-the-repo)
|
||||
+ [AWX branding](#awx-branding)
|
||||
+ [Prerequisites](#prerequisites)
|
||||
+ [System Requirements](#system-requirements)
|
||||
+ [AWX Tunables](#awx-tunables)
|
||||
+ [Choose a deployment platform](#choose-a-deployment-platform)
|
||||
+ [Official vs Building Images](#official-vs-building-images)
|
||||
* [Upgrading from previous versions](#upgrading-from-previous-versions)
|
||||
* [OpenShift](#openshift)
|
||||
+ [Prerequisites](#prerequisites-1)
|
||||
+ [Pre-install steps](#pre-install-steps)
|
||||
- [Deploying to Minishift](#deploying-to-minishift)
|
||||
- [PostgreSQL](#postgresql)
|
||||
+ [Run the installer](#run-the-installer)
|
||||
+ [Post-install](#post-install)
|
||||
+ [Accessing AWX](#accessing-awx)
|
||||
* [Kubernetes](#kubernetes)
|
||||
+ [Prerequisites](#prerequisites-2)
|
||||
+ [Pre-install steps](#pre-install-steps-1)
|
||||
+ [Configuring Helm](#configuring-helm)
|
||||
+ [Run the installer](#run-the-installer-1)
|
||||
+ [Post-install](#post-install-1)
|
||||
+ [Accessing AWX](#accessing-awx-1)
|
||||
+ [SSL Termination](#ssl-termination)
|
||||
* [Docker-Compose](#docker-compose)
|
||||
+ [Prerequisites](#prerequisites-3)
|
||||
+ [Pre-install steps](#pre-install-steps-2)
|
||||
- [Deploying to a remote host](#deploying-to-a-remote-host)
|
||||
- [Inventory variables](#inventory-variables)
|
||||
- [Docker registry](#docker-registry)
|
||||
- [PostgreSQL](#postgresql-1)
|
||||
- [Proxy settings](#proxy-settings)
|
||||
- [Start the build](#start-the-build-2)
|
||||
- [Post build](#post-build-2)
|
||||
- [Accessing AWX](#accessing-awx-2)
|
||||
- [PostgreSQL](#postgresql-1)
|
||||
+ [Run the installer](#run-the-installer-2)
|
||||
+ [Post-install](#post-install-2)
|
||||
+ [Accessing AWX](#accessing-awx-2)
|
||||
|
||||
|
||||
## Getting started
|
||||
|
||||
### Clone the repo
|
||||
@@ -57,7 +61,7 @@ To install the assets, clone the `awx-logos` repo so that it is next to your `aw
|
||||
|
||||
Before you can run a deployment, you'll need the following installed in your local environment:
|
||||
|
||||
- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.4+
|
||||
- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.8+
|
||||
- [Docker](https://docs.docker.com/engine/installation/)
|
||||
+ A recent version
|
||||
- [docker](https://pypi.org/project/docker/) Python module
|
||||
@@ -114,12 +118,34 @@ If these variables are present then all deployments will use these hosted images
|
||||
|
||||
> Multiple versions are provided. `latest` always pulls the most recent. You may also select version numbers at different granularities: 1, 1.0, 1.0.1, 1.0.0.123
|
||||
|
||||
|
||||
## Upgrading from previous versions
|
||||
|
||||
Upgrading AWX involves rerunning the install playbook. Download a newer release from [https://github.com/ansible/awx/releases](https://github.com/ansible/awx/releases) and re-populate the inventory file with your customized variables.
|
||||
|
||||
For convenience, you can create a file called `vars.yml`:
|
||||
|
||||
```
|
||||
admin_password: 'adminpass'
|
||||
pg_password: 'pgpass'
|
||||
rabbitmq_password: 'rabbitpass'
|
||||
secret_key: 'mysupersecret'
|
||||
```
|
||||
|
||||
And pass it to the installer:
|
||||
|
||||
```
|
||||
$ ansible-playbook -i inventory install.yml -e @vars.yml
|
||||
```
|
||||
|
||||
## OpenShift
|
||||
|
||||
### Prerequisites
|
||||
|
||||
To complete a deployment to OpenShift, you will obviously need access to an OpenShift cluster. For demo and testing purposes, you can use [Minishift](https://github.com/minishift/minishift) to create a single node cluster running inside a virtual machine.
|
||||
|
||||
When using OpenShift to deploy AWX, make sure you have sufficient privileges to add the 'privileged' security context; otherwise the installation will fail. The privileged context is needed because [the bubblewrap tool](https://github.com/containers/bubblewrap) is used to add an additional layer of security when running containers.
|
||||
|
||||
You will also need to have the `oc` command in your PATH. The `install.yml` playbook will call out to `oc` when logging into, and creating objects on the cluster.
|
||||
|
||||
The default resource requests per-deployment requires:
|
||||
@@ -131,9 +157,9 @@ This can be tuned by overriding the variables found in [/installer/roles/kuberne
|
||||
|
||||
For more detail on how resource requests are formed see: [https://docs.openshift.com/container-platform/latest/dev_guide/compute_resources.html#dev-compute-resources](https://docs.openshift.com/container-platform/latest/dev_guide/compute_resources.html#dev-compute-resources)
|
||||
|
||||
### Pre-build steps
|
||||
### Pre-install steps
|
||||
|
||||
Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:
|
||||
Before starting the install, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:
|
||||
|
||||
*openshift_host*
|
||||
|
||||
@@ -195,20 +221,20 @@ By default, AWX will deploy a PostgreSQL pod inside of your cluster. You will ne
|
||||
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information. When setting `pg_hostname` the installer will assume you have configured the database in that location and will not launch the postgresql pod.
|
||||
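For illustration, an external-database configuration in the `[all:vars]` section of the inventory could look like the following (hostname and credentials are placeholders, not defaults):

```
pg_hostname=postgres.example.com
pg_username=awx
pg_password=awxpass
pg_admin_password=postgrespass
pg_database=awx
pg_port=5432
```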
|
||||
### Start the build
|
||||
### Run the installer
|
||||
|
||||
To start the build, you will pass two *extra* variables on the command line. The first is *openshift_password*, which is the password for the *openshift_user*, and the second is *docker_registry_password*, which is the password associated with *docker_registry_username*.
|
||||
To start the install, you will pass two *extra* variables on the command line. The first is *openshift_password*, which is the password for the *openshift_user*, and the second is *docker_registry_password*, which is the password associated with *docker_registry_username*.
|
||||
|
||||
If you're using the OpenShift internal registry, then you'll pass an access token for the *docker_registry_password* value, rather than a password. The `oc whoami -t` command will generate the required token, as long as you're logged into the cluster via `oc cluster login`.
|
||||
|
||||
To start the build and deployment, run the following (docker_registry_password is optional if using official images):
|
||||
Run the following command (docker_registry_password is optional if using official images):
|
||||
|
||||
```bash
|
||||
# Start the build and deployment
|
||||
# Start the install
|
||||
$ ansible-playbook -i inventory install.yml -e openshift_password=developer -e docker_registry_password=$(oc whoami -t)
|
||||
```
|
||||
|
||||
### Post build
|
||||
### Post-install
|
||||
|
||||
After the playbook run completes, check the status of the deployment by running `oc get pods`:
|
||||
|
||||
@@ -325,9 +351,9 @@ This can be tuned by overriding the variables found in [/installer/roles/kuberne
|
||||
|
||||
For more detail on how resource requests are formed see: [https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/)
|
||||
|
||||
### Pre-build steps
|
||||
### Pre-install steps
|
||||
|
||||
Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section uncommenting when necessary. Make sure the openshift and standalone docker sections are commented out:
|
||||
Before starting the install process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section uncommenting when necessary. Make sure the openshift and standalone docker sections are commented out:
|
||||
|
||||
*kubernetes_context*
|
||||
|
||||
@@ -347,7 +373,7 @@ If you want the AWX installer to manage creating the database pod (rather than i
|
||||
|
||||
Newer Kubernetes clusters with RBAC enabled will need a service account created; follow the instructions at [https://docs.helm.sh/using_helm/#role-based-access-control](https://docs.helm.sh/using_helm/#role-based-access-control).
|
||||
|
||||
### Start the build
|
||||
### Run the installer
|
||||
|
||||
After making changes to the `inventory` file, use `ansible-playbook` to begin the install
|
||||
|
||||
@@ -355,7 +381,7 @@ After making changes to the `inventory` file use `ansible-playbook` to begin the
|
||||
$ ansible-playbook -i inventory install.yml
|
||||
```
|
||||
|
||||
### Post build
|
||||
### Post-install
|
||||
|
||||
After the playbook run completes, check the status of the deployment by running `kubectl get pods --namespace awx` (replace awx with the namespace you used):
|
||||
|
||||
@@ -403,7 +429,7 @@ Unlike Openshift's `Route` the Kubernetes `Ingress` doesn't yet handle SSL termi
|
||||
+ This also installs the `docker` Python module, which is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it.
|
||||
- [Docker Compose](https://docs.docker.com/compose/install/).
|
||||
|
||||
### Pre-build steps
|
||||
### Pre-install steps
|
||||
|
||||
#### Deploying to a remote host
|
||||
|
||||
@@ -434,7 +460,7 @@ If you choose to use the official images then the remote host will be the one to
|
||||
|
||||
#### Inventory variables
|
||||
|
||||
Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:
|
||||
Before starting the install process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:
|
||||
|
||||
*postgres_data_dir*
|
||||
|
||||
@@ -456,6 +482,10 @@ Before starting the build process, review the [inventory](./installer/inventory)
|
||||
|
||||
> When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
|
||||
|
||||
*custom_venv_dir*
|
||||
|
||||
> Adds the custom venv environments from the local host to be passed into the containers at install.
|
||||
|
||||
*ca_trust_dir*
|
||||
|
||||
> If you're using a non-trusted CA, provide a path where the untrusted certs are stored on your host.
|
||||
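As a sketch, the directory variables above might be set in the inventory like so (all paths are illustrative, not defaults):

```
postgres_data_dir=/var/lib/awx/pgdocker
custom_venv_dir=/opt/custom-venvs
ca_trust_dir=/etc/pki/ca-trust/source/anchors
```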
@@ -505,9 +535,9 @@ AWX requires access to a PostgreSQL database, and by default, one will be create
|
||||
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information.
|
||||
|
||||
### Start the build
|
||||
### Run the installer
|
||||
|
||||
If you are not pushing images to a Docker registry, start the build by running the following:
|
||||
If you are not pushing images to a Docker registry, start the install by running the following:
|
||||
|
||||
```bash
|
||||
# Set the working directory to installer
|
||||
@@ -527,7 +557,7 @@ $ cd installer
|
||||
$ ansible-playbook -i inventory -e docker_registry_password=password install.yml
|
||||
```
|
||||
|
||||
### Post build
|
||||
### Post-install
|
||||
|
||||
After the playbook run completes, Docker will report up to 5 running containers. If you chose to use an existing PostgreSQL database, then it will report 4. You can view the running containers using the `docker ps` command, as follows:
|
||||
|
||||
@@ -604,14 +634,3 @@ Added instance awx to tower
|
||||
The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).
|
||||
|
||||
You will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.
|
||||
|
||||
### Maintenance using docker-compose
|
||||
|
||||
After the installation, maintenance operations with docker-compose can be done by using the `docker-compose.yml` file created at the location pointed by `docker_compose_dir`.
|
||||
|
||||
Among the possible operations, you may:
|
||||
|
||||
- Stop AWX: `docker-compose stop`
- Upgrade AWX: `docker-compose pull && docker-compose up --force-recreate`
|
||||
|
||||
See the [docker-compose documentation](https://docs.docker.com/compose/) for details.
|
||||
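For example, assuming the default `docker_compose_dir` of `/tmp/awxcompose` on the deployment host, the operations above can be run as:

```bash
cd /tmp/awxcompose
# Stop AWX
docker-compose stop
# Upgrade AWX
docker-compose pull && docker-compose up --force-recreate
```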
|
||||
45 Makefile
@@ -100,7 +100,7 @@ clean-languages:
|
||||
find . -type f -regex ".*\.mo$$" -delete
|
||||
|
||||
# Remove temporary build files, compiled Python files.
|
||||
clean: clean-ui clean-api clean-dist
|
||||
clean: clean-ui clean-api clean-awxkit clean-dist
|
||||
rm -rf awx/public
|
||||
rm -rf awx/lib/site-packages
|
||||
rm -rf awx/job_status
|
||||
@@ -116,6 +116,10 @@ clean-api:
|
||||
find . -type d -name "__pycache__" -delete
|
||||
rm -f awx/awx_test.sqlite3*
|
||||
rm -rf requirements/vendor
|
||||
rm -rf awx/projects
|
||||
|
||||
clean-awxkit:
|
||||
rm -rf awxkit/*.egg-info awxkit/.tox
|
||||
|
||||
# convenience target to assert environment variables are defined
|
||||
guard-%:
|
||||
@@ -196,7 +200,7 @@ requirements_awx_dev:
|
||||
|
||||
requirements: requirements_ansible requirements_awx
|
||||
|
||||
requirements_dev: requirements requirements_awx_dev requirements_ansible_dev
|
||||
requirements_dev: requirements_awx requirements_ansible_py3 requirements_awx_dev requirements_ansible_dev
|
||||
|
||||
requirements_test: requirements
|
||||
|
||||
@@ -364,7 +368,7 @@ check: flake8 pep8 # pyflakes pylint
|
||||
|
||||
awx-link:
|
||||
cp -R /tmp/awx.egg-info /awx_devel/ || true
|
||||
sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
|
||||
sed -i "s/placeholder/$(shell cat VERSION)/" /awx_devel/awx.egg-info/PKG-INFO
|
||||
cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
|
||||
|
||||
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
|
||||
@@ -381,7 +385,6 @@ test:
|
||||
prepare_collection_venv:
|
||||
rm -rf $(COLLECTION_VENV)
|
||||
mkdir $(COLLECTION_VENV)
|
||||
ln -s /usr/lib/python2.7/site-packages/ansible $(COLLECTION_VENV)/ansible
|
||||
$(VENV_BASE)/awx/bin/pip install --target=$(COLLECTION_VENV) git+https://github.com/ansible/tower-cli.git
|
||||
|
||||
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
|
||||
@@ -399,6 +402,13 @@ flake8_collection:
|
||||
|
||||
test_collection_all: prepare_collection_venv test_collection flake8_collection
|
||||
|
||||
test_collection_sanity:
|
||||
rm -rf sanity
|
||||
mkdir -p sanity/ansible_collections/awx
|
||||
cp -Ra awx_collection sanity/ansible_collections/awx/awx # symlinks do not work
|
||||
cd sanity/ansible_collections/awx/awx && git init && git add . # requires both this file structure and a git repo, so there you go
|
||||
cd sanity/ansible_collections/awx/awx && ansible-test sanity
|
||||
|
||||
build_collection:
|
||||
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
|
||||
ansible-galaxy collection build awx_collection --output-path=awx_collection
|
||||
@@ -609,28 +619,34 @@ docker-auth:
|
||||
echo "$(IMAGE_REPOSITORY_AUTH)" | docker login -u oauth2accesstoken --password-stdin $(IMAGE_REPOSITORY_BASE); \
|
||||
fi;
|
||||
|
||||
# This directory is bind-mounted inside of the development container and
|
||||
# needs to be pre-created for permissions to be set correctly. Otherwise,
|
||||
# Docker will create this directory as root.
|
||||
awx/projects:
|
||||
@mkdir -p $@
|
||||
|
||||
# Docker isolated rampart
|
||||
docker-compose-isolated:
|
||||
docker-compose-isolated: awx/projects
|
||||
CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up
|
||||
|
||||
# Docker Compose Development environment
|
||||
docker-compose: docker-auth
|
||||
docker-compose: docker-auth awx/projects
|
||||
CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx
|
||||
|
||||
docker-compose-cluster: docker-auth
|
||||
docker-compose-cluster: docker-auth awx/projects
|
||||
CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up
|
||||
|
||||
docker-compose-credential-plugins: docker-auth
|
||||
docker-compose-credential-plugins: docker-auth awx/projects
|
||||
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
|
||||
CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx
|
||||
|
||||
docker-compose-test: docker-auth
|
||||
docker-compose-test: docker-auth awx/projects
|
||||
cd tools && CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /bin/bash
|
||||
|
||||
docker-compose-runtest:
|
||||
docker-compose-runtest: awx/projects
|
||||
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh
|
||||
|
||||
docker-compose-build-swagger:
|
||||
docker-compose-build-swagger: awx/projects
|
||||
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh swagger
|
||||
|
||||
detect-schema-change: genschema
|
||||
@@ -638,7 +654,7 @@ detect-schema-change: genschema
|
||||
# Ignore differences in whitespace with -b
|
||||
diff -u -b reference-schema.json schema.json
|
||||
|
||||
docker-compose-clean:
|
||||
docker-compose-clean: awx/projects
|
||||
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
|
||||
cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf
|
||||
|
||||
@@ -647,7 +663,6 @@ docker-compose-build: awx-devel-build
|
||||
# Base development image build
|
||||
awx-devel-build:
|
||||
docker build -t ansible/awx_devel -f tools/docker-compose/Dockerfile \
|
||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:devel \
|
||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
|
||||
docker tag ansible/awx_devel $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
||||
#docker push $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
||||
@@ -667,10 +682,10 @@ docker-clean:
|
||||
docker-refresh: docker-clean docker-compose
|
||||
|
||||
# Docker Development Environment with Elastic Stack Connected
|
||||
docker-compose-elk: docker-auth
|
||||
docker-compose-elk: docker-auth awx/projects
|
||||
CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
||||
|
||||
docker-compose-cluster-elk: docker-auth
|
||||
docker-compose-cluster-elk: docker-auth awx/projects
|
||||
TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
||||
|
||||
prometheus:
|
||||
|
||||
@@ -86,7 +86,14 @@ def oauth2_getattribute(self, attr):
|
||||
# Custom method to override
|
||||
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
|
||||
from django.conf import settings
|
||||
val = settings.OAUTH2_PROVIDER.get(attr)
|
||||
val = None
|
||||
if 'migrate' not in sys.argv:
|
||||
# certain Django OAuth Toolkit migrations actually reference
|
||||
# setting lookups for references to model classes (e.g.,
|
||||
# oauth2_settings.REFRESH_TOKEN_MODEL)
|
||||
# If we're doing an OAuth2 setting lookup *while running* a migration,
|
||||
# don't do our usual "Configure Tower in Tower" database setting lookup
|
||||
val = settings.OAUTH2_PROVIDER.get(attr)
|
||||
if val is None:
|
||||
val = object.__getattribute__(self, attr)
|
||||
return val
|
||||
|
||||
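Because the hunk above interleaves the old and new lines without +/- markers, here is the patched lookup assembled for readability — a sketch of the post-change method, not a verified copy of the source file:

```python
import sys  # assumed to be imported at module level in the original file


def oauth2_getattribute(self, attr):
    # Custom method to override
    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
    from django.conf import settings
    val = None
    if 'migrate' not in sys.argv:
        # Certain Django OAuth Toolkit migrations reference setting lookups
        # for model classes (e.g., oauth2_settings.REFRESH_TOKEN_MODEL).
        # Skip the database-backed "Configure Tower in Tower" lookup while
        # a migration is running.
        val = settings.OAUTH2_PROVIDER.get(attr)
    if val is None:
        val = object.__getattribute__(self, attr)
    return val
```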
@@ -62,3 +62,14 @@ register(
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
register(
|
||||
'LOGIN_REDIRECT_OVERRIDE',
|
||||
field_class=fields.CharField,
|
||||
allow_blank=True,
|
||||
required=False,
|
||||
label=_('Login redirect override URL'),
|
||||
help_text=_('URL to which unauthorized users will be redirected to log in. '
|
||||
'If blank, users will be sent to the Tower login page.'),
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
|
||||
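Since the new setting is registered under the `authentication` category, it should be adjustable through the settings API once this change lands; a hedged example follows (host, credentials, and URL value are placeholders, and the endpoint path is inferred from the `category_slug` above):

```bash
curl -u admin:password -X PATCH \
  -H "Content-Type: application/json" \
  -d '{"LOGIN_REDIRECT_OVERRIDE": "https://sso.example.com/login"}' \
  https://awx.example.com/api/v2/settings/authentication/
```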
@@ -574,7 +574,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# Verify we have permission to add the object as given.
|
||||
if not request.user.can_access(self.model, 'add', serializer.initial_data):
|
||||
if not request.user.can_access(self.model, 'add', serializer.validated_data):
|
||||
raise PermissionDenied()
|
||||
|
||||
# save the object through the serializer, reload and returned the saved
|
||||
|
||||
@@ -158,9 +158,16 @@ class Metadata(metadata.SimpleMetadata):
|
||||
isinstance(field, JSONField) or
|
||||
isinstance(model_field, JSONField) or
|
||||
isinstance(field, DRFJSONField) or
|
||||
isinstance(getattr(field, 'model_field', None), JSONField)
|
||||
isinstance(getattr(field, 'model_field', None), JSONField) or
|
||||
field.field_name == 'credential_passwords'
|
||||
):
|
||||
field_info['type'] = 'json'
|
||||
elif (
|
||||
isinstance(field, ManyRelatedField) and
|
||||
field.field_name == 'credentials'
|
||||
# launch-time credentials
|
||||
):
|
||||
field_info['type'] = 'list_of_ids'
|
||||
elif isinstance(model_field, BooleanField):
|
||||
field_info['type'] = 'boolean'
|
||||
|
||||
|
||||
@@ -1472,7 +1472,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
|
||||
|
||||
class Meta:
|
||||
model = ProjectUpdate
|
||||
fields = ('*', 'project', 'job_type', '-controller_node')
|
||||
fields = ('*', 'project', 'job_type', 'job_tags', '-controller_node')
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(ProjectUpdateSerializer, self).get_related(obj)
|
||||
@@ -2456,12 +2456,18 @@ class CredentialTypeSerializer(BaseSerializer):
|
||||
raise PermissionDenied(
|
||||
detail=_("Modifications not allowed for managed credential types")
|
||||
)
|
||||
|
||||
old_inputs = {}
|
||||
if self.instance:
|
||||
old_inputs = copy.deepcopy(self.instance.inputs)
|
||||
|
||||
ret = super(CredentialTypeSerializer, self).validate(attrs)
|
||||
|
||||
if self.instance and self.instance.credentials.exists():
|
||||
if 'inputs' in attrs and attrs['inputs'] != self.instance.inputs:
|
||||
if 'inputs' in attrs and old_inputs != self.instance.inputs:
|
||||
raise PermissionDenied(
|
||||
detail= _("Modifications to inputs are not allowed for credential types that are in use")
|
||||
)
|
||||
ret = super(CredentialTypeSerializer, self).validate(attrs)
|
||||
|
||||
if 'kind' in attrs and attrs['kind'] not in ('cloud', 'net'):
|
||||
raise serializers.ValidationError({
|
||||
@@ -4338,13 +4344,30 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
error_list = []
|
||||
collected_messages = []
|
||||
|
||||
def check_messages(messages):
|
||||
for message_type in messages:
|
||||
if message_type not in ('message', 'body'):
|
||||
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
|
||||
continue
|
||||
message = messages[message_type]
|
||||
if message is None:
|
||||
continue
|
||||
if not isinstance(message, str):
|
||||
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
|
||||
continue
|
||||
if message_type == 'message':
|
||||
if '\n' in message:
|
||||
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
|
||||
continue
|
||||
collected_messages.append(message)
|
||||
|
||||
# Validate structure / content types
|
||||
if not isinstance(messages, dict):
|
||||
error_list.append(_("Expected dict for 'messages' field, found {}".format(type(messages))))
|
||||
else:
|
||||
for event in messages:
|
||||
if event not in ['started', 'success', 'error']:
|
||||
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', or 'error'").format(event))
|
||||
if event not in ('started', 'success', 'error', 'workflow_approval'):
|
||||
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', 'error', or 'workflow_approval'").format(event))
|
||||
continue
|
||||
event_messages = messages[event]
|
||||
if event_messages is None:
|
||||
@@ -4352,21 +4375,21 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
if not isinstance(event_messages, dict):
|
||||
error_list.append(_("Expected dict for event '{}', found {}").format(event, type(event_messages)))
|
||||
continue
|
||||
for message_type in event_messages:
|
||||
if message_type not in ['message', 'body']:
|
||||
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
|
||||
continue
|
||||
message = event_messages[message_type]
|
||||
if message is None:
|
||||
continue
|
||||
if not isinstance(message, str):
|
||||
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
|
||||
continue
|
||||
if message_type == 'message':
|
||||
if '\n' in message:
|
||||
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
|
||||
if event == 'workflow_approval':
|
||||
for subevent in event_messages:
|
||||
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
|
||||
error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
|
||||
"'running', 'approved', 'timed_out', or 'denied'").format(subevent))
|
||||
continue
|
||||
collected_messages.append(message)
|
||||
subevent_messages = event_messages[subevent]
|
||||
if subevent_messages is None:
|
||||
continue
|
||||
if not isinstance(subevent_messages, dict):
|
||||
error_list.append(_("Expected dict for workflow approval event '{}', found {}").format(subevent, type(subevent_messages)))
|
||||
continue
|
||||
check_messages(subevent_messages)
|
||||
else:
|
||||
check_messages(event_messages)
|
||||
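To make the accepted structure concrete, a `messages` value that passes this validation looks roughly like the following (the message texts are illustrative):

```python
# Top-level keys are events; each maps to 'message'/'body' entries (or None).
# 'message' strings may not contain newlines; 'workflow_approval' nests the
# per-approval sub-events instead of holding message entries directly.
messages = {
    "started": {"message": "Job started", "body": None},
    "success": {"message": "Job succeeded"},
    "error": {"message": "Job failed"},
    "workflow_approval": {
        "running": {"message": "Approval node running"},
        "approved": {"message": "Approval granted"},
        "timed_out": {"message": "Approval timed out"},
        "denied": {"message": "Approval denied"},
    },
}
```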
|
||||
# Subclass to return name of undefined field
|
||||
class DescriptiveUndefined(StrictUndefined):
|
||||
@@ -4497,8 +4520,18 @@ class NotificationSerializer(BaseSerializer):
|
||||
'notification_type', 'recipients', 'subject', 'body')
|
||||
|
||||
def get_body(self, obj):
|
||||
if obj.notification_type == 'webhook' and 'body' in obj.body:
|
||||
return obj.body['body']
|
||||
if obj.notification_type in ('webhook', 'pagerduty'):
|
||||
if isinstance(obj.body, dict):
|
||||
if 'body' in obj.body:
|
||||
return obj.body['body']
|
||||
elif isinstance(obj.body, str):
|
||||
# attempt to load json string
|
||||
try:
|
||||
potential_body = json.loads(obj.body)
|
||||
if isinstance(potential_body, dict):
|
||||
return potential_body
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
return obj.body
|
||||
|
||||
def get_related(self, obj):
|
||||
@@ -4631,6 +4664,10 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
summary_fields = super(ScheduleSerializer, self).get_summary_fields(obj)
|
||||
|
||||
if isinstance(obj.unified_job_template, SystemJobTemplate):
|
||||
summary_fields['unified_job_template']['job_type'] = obj.unified_job_template.job_type
|
||||
|
||||
if 'inventory' in summary_fields:
|
||||
return summary_fields
|
||||
|
||||
@@ -4774,6 +4811,18 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.'))
|
||||
if self.instance and self.instance.controller_id is not None:
|
||||
raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.'))
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_policy_instance_percentage(self, value):
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_policy_instance_minimum(self, value):
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_name(self, value):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Cancel Inventory Update
|
||||
|
||||
Make a GET request to this resource to determine if the inventory update can be
|
||||
cancelled. The response will include the following field:
|
||||
canceled. The response will include the following field:
|
||||
|
||||
* `can_cancel`: Indicates whether this update can be canceled (boolean,
|
||||
read-only)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{% ifmeth GET %}
|
||||
# Determine if a Job can be cancelled
|
||||
# Determine if a Job can be canceled
|
||||
|
||||
Make a GET request to this resource to determine if the job can be cancelled.
|
||||
Make a GET request to this resource to determine if the job can be canceled.
|
||||
The response will include the following field:
|
||||
|
||||
* `can_cancel`: Indicates whether this job can be canceled (boolean, read-only)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Cancel Project Update
|
||||
|
||||
Make a GET request to this resource to determine if the project update can be
|
||||
cancelled. The response will include the following field:
|
||||
canceled. The response will include the following field:
|
||||
|
||||
* `can_cancel`: Indicates whether this update can be canceled (boolean,
|
||||
read-only)
|
||||
|
||||
@@ -72,12 +72,11 @@ from awx.api.generics import (
|
||||
SubListDestroyAPIView
|
||||
)
|
||||
from awx.api.versioning import reverse
|
||||
from awx.conf.license import get_license
|
||||
from awx.main import models
|
||||
from awx.main.utils import (
|
||||
camelcase_to_underscore,
|
||||
extract_ansible_vars,
|
||||
get_awx_version,
|
||||
get_awx_http_client_headers,
|
||||
get_object_or_400,
|
||||
getattrd,
|
||||
get_pk_from_dict,
|
||||
@@ -102,7 +101,7 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
|
||||
from awx.api.views.mixin import (
|
||||
ControlledByScmMixin, InstanceGroupMembershipMixin,
|
||||
OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
|
||||
UnifiedJobDeletionMixin,
|
||||
UnifiedJobDeletionMixin, NoTruncateMixin,
|
||||
)
|
||||
from awx.api.views.organization import ( # noqa
|
||||
OrganizationList,
|
||||
@@ -383,6 +382,13 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
|
||||
serializer_class = serializers.InstanceGroupSerializer
|
||||
permission_classes = (InstanceGroupTowerPermission,)
|
||||
|
||||
def update_raw_data(self, data):
|
||||
if self.get_object().is_containerized:
|
||||
data.pop('policy_instance_percentage', None)
|
||||
data.pop('policy_instance_minimum', None)
|
||||
data.pop('policy_instance_list', None)
|
||||
return super(InstanceGroupDetail, self).update_raw_data(data)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if instance.controller is not None:
|
||||
@@ -568,6 +574,7 @@ class TeamUsersList(BaseUsersList):
|
||||
serializer_class = serializers.UserSerializer
|
||||
parent_model = models.Team
|
||||
relationship = 'member_role.members'
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class TeamRolesList(SubListAttachDetachAPIView):
|
||||
@@ -904,6 +911,7 @@ class UserList(ListCreateAPIView):
|
||||
model = models.User
|
||||
serializer_class = serializers.UserSerializer
|
||||
permission_classes = (UserPermission,)
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class UserMeList(ListAPIView):
|
||||
@@ -911,6 +919,7 @@ class UserMeList(ListAPIView):
|
||||
model = models.User
|
||||
serializer_class = serializers.UserSerializer
|
||||
name = _('Me')
|
||||
ordering = ('username',)
|
||||
|
||||
def get_queryset(self):
|
||||
return self.model.objects.filter(pk=self.request.user.pk)
|
||||
@@ -1254,6 +1263,7 @@ class CredentialOwnerUsersList(SubListAPIView):
|
||||
serializer_class = serializers.UserSerializer
|
||||
parent_model = models.Credential
|
||||
relationship = 'admin_role.members'
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class CredentialOwnerTeamsList(SubListAPIView):
|
||||
@@ -1375,6 +1385,7 @@ class CredentialExternalTest(SubDetailAPIView):
|
||||
|
||||
model = models.Credential
|
||||
serializer_class = serializers.EmptySerializer
|
||||
obj_permission_type = 'use'
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
@@ -1632,18 +1643,6 @@ class HostInsights(GenericAPIView):
|
||||
|
||||
return session
|
||||
|
||||
def _get_headers(self):
|
||||
license = get_license(show_key=False).get('license_type', 'UNLICENSED')
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': '{} {} ({})'.format(
|
||||
'AWX' if license == 'open' else 'Red Hat Ansible Tower',
|
||||
get_awx_version(),
|
||||
license
|
||||
)
|
||||
}
|
||||
|
||||
return headers
|
||||
|
||||
def _get_platform_info(self, host, session, headers):
|
||||
url = '{}/api/inventory/v1/hosts?insights_id={}'.format(
|
||||
@@ -1710,7 +1709,7 @@ class HostInsights(GenericAPIView):
|
||||
username = cred.get_input('username', default='')
|
||||
password = cred.get_input('password', default='')
|
||||
session = self._get_session(username, password)
|
||||
headers = self._get_headers()
|
||||
headers = get_awx_http_client_headers()
|
||||
|
||||
data = self._get_insights(host, session, headers)
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
@@ -2136,12 +2135,21 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
|
||||
def perform_list_destroy(self, instance_list):
|
||||
inv_source = self.get_parent_object()
|
||||
with ignore_inventory_computed_fields():
|
||||
# Activity stream doesn't record disassociation here anyway
|
||||
# no signals-related reason to not bulk-delete
|
||||
models.Host.groups.through.objects.filter(
|
||||
host__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
|
||||
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
|
||||
from awx.main.signals import disable_activity_stream
|
||||
with disable_activity_stream():
|
||||
# job host summary deletion necessary to avoid deadlock
|
||||
models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
|
||||
models.Host.objects.filter(inventory_sources=inv_source).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy([])
|
||||
else:
|
||||
# Advance delete of group-host memberships to prevent deadlock
|
||||
# Activity stream doesn't record disassociation here anyway
|
||||
# no signals-related reason to not bulk-delete
|
||||
models.Host.groups.through.objects.filter(
|
||||
host__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
|
||||
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
|
||||
return r
|
||||
|
||||
@@ -2157,11 +2165,18 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
|
||||
def perform_list_destroy(self, instance_list):
|
||||
inv_source = self.get_parent_object()
|
||||
with ignore_inventory_computed_fields():
|
||||
# Same arguments for bulk delete as with host list
|
||||
models.Group.hosts.through.objects.filter(
|
||||
group__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
|
||||
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
|
||||
from awx.main.signals import disable_activity_stream
|
||||
with disable_activity_stream():
|
||||
models.Group.objects.filter(inventory_sources=inv_source).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy([])
|
||||
else:
|
||||
# Advance delete of group-host memberships to prevent deadlock
|
||||
# Same arguments for bulk delete as with host list
|
||||
models.Group.hosts.through.objects.filter(
|
||||
group__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
|
||||
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
|
||||
return r
|
||||
|
||||
@@ -3762,18 +3777,12 @@ class JobHostSummaryDetail(RetrieveAPIView):
|
||||
serializer_class = serializers.JobHostSummarySerializer
|
||||
|
||||
|
||||
class JobEventList(ListAPIView):
|
||||
class JobEventList(NoTruncateMixin, ListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
if self.request.query_params.get('no_truncate'):
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class JobEventDetail(RetrieveAPIView):
|
||||
|
||||
@@ -3786,7 +3795,7 @@ class JobEventDetail(RetrieveAPIView):
|
||||
return context
|
||||
|
||||
|
||||
class JobEventChildrenList(SubListAPIView):
|
||||
class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
@@ -3811,7 +3820,7 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
|
||||
name = _('Job Event Hosts List')
|
||||
|
||||
|
||||
class BaseJobEventsList(SubListAPIView):
|
||||
class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
@@ -4007,18 +4016,12 @@ class AdHocCommandRelaunch(GenericAPIView):
|
||||
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
|
||||
|
||||
class AdHocCommandEventList(ListAPIView):
|
||||
class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
|
||||
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
if self.request.query_params.get('no_truncate'):
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class AdHocCommandEventDetail(RetrieveAPIView):
|
||||
|
||||
@@ -4031,7 +4034,7 @@ class AdHocCommandEventDetail(RetrieveAPIView):
|
||||
return context
|
||||
|
||||
|
||||
class BaseAdHocCommandEventsList(SubListAPIView):
|
||||
class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
@@ -4297,8 +4300,15 @@ class NotificationTemplateTest(GenericAPIView):
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE),
|
||||
{"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)})
|
||||
msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
|
||||
if obj.notification_type in ('email', 'pagerduty'):
|
||||
body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
|
||||
elif obj.notification_type == 'webhook':
|
||||
body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
|
||||
else:
|
||||
body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}
|
||||
notification = obj.generate_notification(msg, body)
|
||||
|
||||
if not notification:
|
||||
return Response({}, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
|
||||
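For reference, the per-type test body chosen in the NotificationTemplateTest hunk above can be restated as a small standalone function. The sketch below is illustrative only (test_body is not a function in the patch, and the URL is made up); it simply mirrors the branching: a plain string for email and pagerduty, a JSON-encoded string for webhook, and a dict for every other notification type.

def test_body(notification_type, nt_id, url):
    # mirrors the branching introduced in NotificationTemplateTest.post above
    if notification_type in ('email', 'pagerduty'):
        return "Ansible Tower Test Notification {} {}".format(nt_id, url)
    elif notification_type == 'webhook':
        return '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(nt_id, url)
    return {"body": "Ansible Tower Test Notification {} {}".format(nt_id, url)}

assert isinstance(test_body('email', 42, 'https://tower.example.com'), str)
assert test_body('webhook', 42, 'https://tower.example.com').startswith('{"body"')
assert isinstance(test_body('slack', 42, 'https://tower.example.com'), dict)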
@@ -270,3 +270,11 @@ class ControlledByScmMixin(object):
        obj = super(ControlledByScmMixin, self).get_parent_object()
        self._reset_inv_src_rev(obj)
        return obj


class NoTruncateMixin(object):
    def get_serializer_context(self):
        context = super().get_serializer_context()
        if self.request.query_params.get('no_truncate'):
            context.update(no_truncate=True)
        return context

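A self-contained sketch of what the new mixin does (the Fake* classes below are illustrative stand-ins, not AWX or DRF classes): a request carrying ?no_truncate=1 simply adds no_truncate=True to the serializer context, which the event serializers can then use to return stdout without the usual display truncation.

class FakeRequest:
    # stand-in for the DRF request object the mixin consults
    def __init__(self, query_params):
        self.query_params = query_params


class FakeBaseView:
    request = None

    def get_serializer_context(self):
        return {'request': self.request}


class NoTruncateSketch(FakeBaseView):
    # same body as NoTruncateMixin.get_serializer_context above
    def get_serializer_context(self):
        context = super().get_serializer_context()
        if self.request.query_params.get('no_truncate'):
            context.update(no_truncate=True)
        return context


view = NoTruncateSketch()
view.request = FakeRequest({'no_truncate': '1'})
assert view.get_serializer_context().get('no_truncate') is True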
@@ -60,6 +60,7 @@ class ApiRootView(APIView):
|
||||
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
||||
data['custom_logo'] = settings.CUSTOM_LOGO
|
||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
|
||||
return Response(data)
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from hashlib import sha1
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import urllib.parse
|
||||
|
||||
@@ -151,13 +150,13 @@ class WebhookReceiverBase(APIView):
|
||||
'webhook_credential': obj.webhook_credential,
|
||||
'webhook_guid': event_guid,
|
||||
},
|
||||
'extra_vars': json.dumps({
|
||||
'extra_vars': {
|
||||
'tower_webhook_event_type': event_type,
|
||||
'tower_webhook_event_guid': event_guid,
|
||||
'tower_webhook_event_ref': event_ref,
|
||||
'tower_webhook_status_api': status_api,
|
||||
'tower_webhook_payload': request.data,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
new_job = obj.create_unified_job(**kwargs)
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# Python
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
import urllib.parse as urlparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.core.validators import URLValidator
|
||||
from django.core.validators import URLValidator, _lazy_re_compile
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
@@ -118,17 +119,42 @@ class StringListPathField(StringListField):
|
||||
|
||||
|
||||
class URLField(CharField):
|
||||
# these lines set up a custom regex that allow numbers in the
|
||||
# top-level domain
|
||||
tld_re = (
|
||||
r'\.' # dot
|
||||
r'(?!-)' # can't start with a dash
|
||||
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
|
||||
r'|xn--[a-z0-9]{1,59})' # or punycode label
|
||||
r'(?<!-)' # can't end with a dash
|
||||
r'\.?' # may have a trailing dot
|
||||
)
|
||||
|
||||
host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'
|
||||
|
||||
regex = _lazy_re_compile(
|
||||
r'^(?:[a-z0-9\.\-\+]*)://' # scheme is validated separately
|
||||
r'(?:[^\s:@/]+(?::[^\s:@/]*)?@)?' # user:pass authentication
|
||||
r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
|
||||
r'(?::\d{2,5})?' # port
|
||||
r'(?:[/?#][^\s]*)?' # resource path
|
||||
r'\Z', re.IGNORECASE)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
schemes = kwargs.pop('schemes', None)
|
||||
regex = kwargs.pop('regex', None)
|
||||
self.allow_plain_hostname = kwargs.pop('allow_plain_hostname', False)
|
||||
self.allow_numbers_in_top_level_domain = kwargs.pop('allow_numbers_in_top_level_domain', True)
|
||||
super(URLField, self).__init__(**kwargs)
|
||||
validator_kwargs = dict(message=_('Enter a valid URL'))
|
||||
if schemes is not None:
|
||||
validator_kwargs['schemes'] = schemes
|
||||
if regex is not None:
|
||||
validator_kwargs['regex'] = regex
|
||||
if self.allow_numbers_in_top_level_domain and regex is None:
|
||||
# default behavior is to allow numbers in the top level domain
|
||||
# if a custom regex isn't provided
|
||||
validator_kwargs['regex'] = URLField.regex
|
||||
self.validators.append(URLValidator(**validator_kwargs))
|
||||
|
||||
def to_representation(self, value):
|
||||
|
||||
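The hand-rolled pattern above exists so that URLs whose top-level domain contains digits (for example https://www.example.org42) pass validation, which Django's stock URLValidator label pattern does not accept. A standalone sketch of the relaxed label, standard library only and ASCII-only (it leaves out URLValidator.ul), shows the effect:

import re

tld_re = (
    r'\.'                      # dot
    r'(?!-)'                   # label cannot start with a dash
    r'(?:[a-z0-9-]{2,63}'      # letters, digits and dashes allowed in the label
    r'|xn--[a-z0-9]{1,59})'    # or a punycode label
    r'(?<!-)'                  # label cannot end with a dash
    r'\.?'                     # optional trailing dot
)

assert re.search(tld_re + r'$', 'www.example.org42', re.IGNORECASE)      # digits in the TLD accepted
assert not re.search(tld_re + r'$', 'www.example.-org', re.IGNORECASE)   # dash rules still enforced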
@@ -1,13 +1,12 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Tower
|
||||
from awx.main.utils.common import get_licenser
|
||||
|
||||
__all__ = ['get_license']
|
||||
|
||||
|
||||
def _get_validated_license_data():
|
||||
from awx.main.utils.common import get_licenser
|
||||
return get_licenser().validate()
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations
|
||||
from awx.conf.migrations import _reencrypt
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@@ -12,5 +11,8 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(_reencrypt.replace_aesecb_fernet),
|
||||
# This list is intentionally empty.
|
||||
# Tower 3.2 included several data migrations that are no longer
|
||||
# necessary (this list is now empty because Tower 3.2 is past EOL and
|
||||
# cannot be directly upgraded to modern versions of Tower)
|
||||
]
|
||||
|
||||
@@ -1,30 +1,13 @@
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher
|
||||
from cryptography.hazmat.primitives.ciphers.algorithms import AES
|
||||
from cryptography.hazmat.primitives.ciphers.modes import ECB
|
||||
|
||||
from awx.conf import settings_registry
|
||||
|
||||
|
||||
__all__ = ['replace_aesecb_fernet', 'get_encryption_key', 'encrypt_field',
|
||||
'decrypt_value', 'decrypt_value', 'should_decrypt_field']
|
||||
|
||||
|
||||
def replace_aesecb_fernet(apps, schema_editor):
|
||||
from awx.main.utils.encryption import encrypt_field
|
||||
Setting = apps.get_model('conf', 'Setting')
|
||||
|
||||
for setting in Setting.objects.filter().order_by('pk'):
|
||||
if settings_registry.is_setting_encrypted(setting.key):
|
||||
if should_decrypt_field(setting.value):
|
||||
setting.value = decrypt_field(setting, 'value')
|
||||
setting.value = encrypt_field(setting, 'value')
|
||||
setting.save()
|
||||
__all__ = ['get_encryption_key', 'decrypt_field']
|
||||
|
||||
|
||||
def get_encryption_key(field_name, pk=None):
|
||||
@@ -76,38 +59,3 @@ def decrypt_field(instance, field_name, subfield=None):
|
||||
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
|
||||
|
||||
return decrypt_value(key, value)
|
||||
|
||||
|
||||
def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=False):
|
||||
'''
|
||||
Return content of the given instance and field name encrypted.
|
||||
'''
|
||||
value = getattr(instance, field_name)
|
||||
if isinstance(value, dict) and subfield is not None:
|
||||
value = value[subfield]
|
||||
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
|
||||
return value
|
||||
if skip_utf8:
|
||||
utf8 = False
|
||||
else:
|
||||
utf8 = type(value) == str
|
||||
value = smart_str(value)
|
||||
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
|
||||
encryptor = Cipher(AES(key), ECB(), default_backend()).encryptor()
|
||||
block_size = 16
|
||||
while len(value) % block_size != 0:
|
||||
value += '\x00'
|
||||
encrypted = encryptor.update(value) + encryptor.finalize()
|
||||
b64data = base64.b64encode(encrypted)
|
||||
tokens = ['$encrypted', 'AES', b64data]
|
||||
if utf8:
|
||||
# If the value to encrypt is utf-8, we need to add a marker so we
|
||||
# know to decode the data when it's decrypted later
|
||||
tokens.insert(1, 'UTF8')
|
||||
return '$'.join(tokens)
|
||||
|
||||
|
||||
def should_decrypt_field(value):
|
||||
if hasattr(value, 'startswith'):
|
||||
return value.startswith('$encrypted$') and '$AESCBC$' not in value
|
||||
return False
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from rest_framework.fields import ValidationError
|
||||
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField
|
||||
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField
|
||||
|
||||
|
||||
class TestStringListBooleanField():
|
||||
@@ -62,7 +62,7 @@ class TestListTuplesField():
|
||||
FIELD_VALUES = [
|
||||
([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
|
||||
]
|
||||
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
("abc", type("abc")),
|
||||
([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
|
||||
@@ -130,3 +130,25 @@ class TestStringListPathField():
|
||||
field.to_internal_value([value])
|
||||
assert e.value.detail[0] == "{} is not a valid path choice.".format(value)
|
||||
|
||||
|
||||
class TestURLField():
|
||||
regex = "^https://www.example.org$"
|
||||
|
||||
@pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[
|
||||
("ldap://www.example.org42", "ldap", None, True, True),
|
||||
("https://www.example.org42", "https", None, False, False),
|
||||
("https://www.example.org", None, regex, None, True),
|
||||
("https://www.example3.org", None, regex, None, False),
|
||||
("ftp://www.example.org", "https", None, None, False)
|
||||
])
|
||||
def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
|
||||
kwargs = {}
|
||||
kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)
|
||||
kwargs.setdefault("schemes", schemes)
|
||||
kwargs.setdefault("regex", regex)
|
||||
field = URLField(**kwargs)
|
||||
if expect_no_error:
|
||||
field.run_validators(url)
|
||||
else:
|
||||
with pytest.raises(ValidationError):
|
||||
field.run_validators(url)
|
||||
|
||||
@@ -465,7 +465,7 @@ class BaseAccess(object):
|
||||
else:
|
||||
relationship = 'members'
|
||||
return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
|
||||
except (ParseError, ObjectDoesNotExist):
|
||||
except (ParseError, ObjectDoesNotExist, PermissionDenied):
|
||||
return False
|
||||
return False
|
||||
|
||||
@@ -1660,26 +1660,19 @@ class JobAccess(BaseAccess):
|
||||
except JobLaunchConfig.DoesNotExist:
|
||||
config = None
|
||||
|
||||
if obj.job_template and (self.user not in obj.job_template.execute_role):
|
||||
return False
|
||||
|
||||
# Check if JT execute access (and related prompts) is sufficient
|
||||
if obj.job_template is not None:
|
||||
if config is None:
|
||||
prompts_access = False
|
||||
elif not config.has_user_prompts(obj.job_template):
|
||||
prompts_access = True
|
||||
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
prompts_access = False
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
|
||||
else:
|
||||
prompts_access = (
|
||||
JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}) and
|
||||
not config.has_unprompted(obj.job_template)
|
||||
)
|
||||
jt_access = self.user in obj.job_template.execute_role
|
||||
if prompts_access and jt_access:
|
||||
if config and obj.job_template:
|
||||
if not config.has_user_prompts(obj.job_template):
|
||||
return True
|
||||
elif not jt_access:
|
||||
return False
|
||||
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
# never allowed, not even for org admins
|
||||
raise PermissionDenied(_('Job was launched with secret prompts provided by another user.'))
|
||||
elif not config.has_unprompted(obj.job_template):
|
||||
if JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
|
||||
return True
|
||||
|
||||
org_access = bool(obj.inventory) and self.user in obj.inventory.organization.inventory_admin_role
|
||||
project_access = obj.project is None or self.user in obj.project.admin_role
|
||||
@@ -2098,23 +2091,20 @@ class WorkflowJobAccess(BaseAccess):
|
||||
self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
|
||||
return False
|
||||
|
||||
# execute permission to WFJT is mandatory for any relaunch
|
||||
if self.user not in template.execute_role:
|
||||
return False
|
||||
|
||||
# Check if access to prompts to prevent relaunch
|
||||
if config.prompts_dict():
|
||||
if obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
|
||||
return False
|
||||
raise PermissionDenied(_("Job was launched with secret prompts provided by another user."))
|
||||
if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts you lack access to.')
|
||||
return False
|
||||
raise PermissionDenied(_('Job was launched with prompts you lack access to.'))
|
||||
if config.has_unprompted(template):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
|
||||
return False
|
||||
raise PermissionDenied(_('Job was launched with prompts no longer accepted.'))
|
||||
|
||||
# execute permission to WFJT is mandatory for any relaunch
|
||||
return (self.user in template.execute_role)
|
||||
return True # passed config checks
|
||||
|
||||
def can_recreate(self, obj):
|
||||
node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template')
|
||||
|
||||
@@ -166,6 +166,8 @@ def instance_info(since, include_hostnames=False):
|
||||
instances = models.Instance.objects.values_list('hostname').values(
|
||||
'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled')
|
||||
for instance in instances:
|
||||
consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'],
|
||||
status__in=('running', 'waiting')))
|
||||
instance_info = {
|
||||
'uuid': instance['uuid'],
|
||||
'version': instance['version'],
|
||||
@@ -174,7 +176,9 @@ def instance_info(since, include_hostnames=False):
|
||||
'memory': instance['memory'],
|
||||
'managed_by_policy': instance['managed_by_policy'],
|
||||
'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
|
||||
'enabled': instance['enabled']
|
||||
'enabled': instance['enabled'],
|
||||
'consumed_capacity': consumed_capacity,
|
||||
'remaining_capacity': instance['capacity'] - consumed_capacity
|
||||
}
|
||||
if include_hostnames is True:
|
||||
instance_info['hostname'] = instance['hostname']
|
||||
|
||||
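A worked example of the two derived values added above, with made-up numbers: consumed_capacity is the summed task_impact of the node's running or waiting jobs, and remaining_capacity is whatever is left of the instance's capacity.

capacity = 100
task_impacts = [20, 20, 5]             # impacts of running/waiting jobs on the node
consumed_capacity = sum(task_impacts)
remaining_capacity = capacity - consumed_capacity
assert (consumed_capacity, remaining_capacity) == (45, 55)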
@@ -15,6 +15,7 @@ from awx.conf.license import get_license
|
||||
from awx.main.models import Job
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.models.ha import TowerAnalyticsState
|
||||
from awx.main.utils import get_awx_http_client_headers
|
||||
|
||||
|
||||
__all__ = ['register', 'gather', 'ship', 'table_version']
|
||||
@@ -165,11 +166,15 @@ def ship(path):
|
||||
return logger.error('REDHAT_PASSWORD is not set')
|
||||
with open(path, 'rb') as f:
|
||||
files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
|
||||
response = requests.post(url,
|
||||
files=files,
|
||||
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
|
||||
auth=(rh_user, rh_password),
|
||||
timeout=(31, 31))
|
||||
s = requests.Session()
|
||||
s.headers = get_awx_http_client_headers()
|
||||
s.headers.pop('Content-Type')
|
||||
response = s.post(url,
|
||||
files=files,
|
||||
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
|
||||
auth=(rh_user, rh_password),
|
||||
headers=s.headers,
|
||||
timeout=(31, 31))
|
||||
if response.status_code != 202:
|
||||
return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
|
||||
response.text))
|
||||
|
||||
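A note on the Session change above, with a minimal sketch (the header value below is a stand-in, not the real AWX header set): the headers returned by get_awx_http_client_headers() include a Content-Type entry (otherwise the pop above would raise), and an explicitly set Content-Type would stop requests from substituting the multipart/form-data boundary it generates for files=..., so the header is removed before the upload.

import requests

s = requests.Session()
s.headers.update({'User-Agent': 'analytics-upload-sketch'})  # stand-in for the AWX header set
s.headers.pop('Content-Type', None)                          # let requests choose the multipart boundary
# s.post(url, files={'file': ('bundle.tar.gz', fileobj)}, auth=(rh_user, rh_password), timeout=(31, 31))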
@@ -46,6 +46,8 @@ INSTANCE_MEMORY = Gauge('awx_instance_memory', 'RAM (Kb) on each node in a Tower
|
||||
INSTANCE_INFO = Info('awx_instance', 'Info about each node in a Tower system', ['hostname', 'instance_uuid',])
|
||||
INSTANCE_LAUNCH_TYPE = Gauge('awx_instance_launch_type_total', 'Type of Job launched', ['node', 'launch_type',])
|
||||
INSTANCE_STATUS = Gauge('awx_instance_status_total', 'Status of Job launched', ['node', 'status',])
|
||||
INSTANCE_CONSUMED_CAPACITY = Gauge('awx_instance_consumed_capacity', 'Consumed capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
|
||||
INSTANCE_REMAINING_CAPACITY = Gauge('awx_instance_remaining_capacity', 'Remaining capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
|
||||
|
||||
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
|
||||
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')
|
||||
@@ -104,6 +106,8 @@ def metrics():
|
||||
INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
|
||||
INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
|
||||
INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
|
||||
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
|
||||
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
|
||||
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info({
|
||||
'enabled': str(instance_data[uuid]['enabled']),
|
||||
'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
|
||||
|
||||
@@ -1,8 +1,17 @@
|
||||
from django.apps import AppConfig
|
||||
from django.db.models.signals import pre_migrate
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
def raise_migration_flag(**kwargs):
|
||||
from awx.main.tasks import set_migration_flag
|
||||
set_migration_flag.delay()
|
||||
|
||||
|
||||
class MainConfig(AppConfig):
|
||||
|
||||
name = 'awx.main'
|
||||
verbose_name = _('Main')
|
||||
|
||||
def ready(self):
|
||||
pre_migrate.connect(raise_migration_flag, sender=self)
|
||||
|
||||
@@ -54,15 +54,6 @@ register(
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'TOWER_ADMIN_ALERTS',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Enable Administrator Alerts'),
|
||||
help_text=_('Email Admin users for system events that may require attention.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'TOWER_URL_BASE',
|
||||
field_class=fields.URLField,
|
||||
@@ -513,6 +504,27 @@ register(
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PUBLIC_GALAXY_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
default=True,
|
||||
label=_('Allow Access to Public Galaxy'),
|
||||
help_text=_('Allow or deny access to the public Ansible Galaxy during project updates.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'GALAXY_IGNORE_CERTS',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Ignore Ansible Galaxy SSL Certificate Verification'),
|
||||
help_text=_('If set to true, certificate validation will not be done when'
|
||||
'installing content from any Galaxy server.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'STDOUT_MAX_BYTES_DISPLAY',
|
||||
field_class=fields.IntegerField,
|
||||
|
||||
@@ -123,8 +123,16 @@ class PoolWorker(object):
|
||||
# if any tasks were finished, removed them from the managed tasks for
|
||||
# this worker
|
||||
for uuid in finished:
|
||||
self.messages_finished += 1
|
||||
del self.managed_tasks[uuid]
|
||||
try:
|
||||
del self.managed_tasks[uuid]
|
||||
self.messages_finished += 1
|
||||
except KeyError:
|
||||
# ansible _sometimes_ appears to send events w/ duplicate UUIDs;
|
||||
# UUIDs for ansible events are *not* actually globally unique
|
||||
# when this occurs, it's _fine_ to ignore this KeyError because
|
||||
# the purpose of self.managed_tasks is to just track internal
|
||||
# state of which events are *currently* being processed.
|
||||
pass
|
||||
|
||||
@property
|
||||
def current_task(self):
|
||||
|
||||
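An equivalent, more compact way to express the defensive removal above is dict.pop() with a default; the tiny sketch below (illustrative only, not the patch itself) shows how a duplicated event UUID is simply ignored while the finished counter still only advances for entries that were actually being tracked.

finished = ['abc123', 'abc123']          # duplicate UUIDs, as ansible can emit
managed_tasks = {'abc123': object()}
messages_finished = 0
for uuid in finished:
    if managed_tasks.pop(uuid, None) is not None:
        messages_finished += 1
assert messages_finished == 1 and managed_tasks == {}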
@@ -4,6 +4,7 @@ import importlib
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from kubernetes.config import kube_config
|
||||
|
||||
from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
|
||||
|
||||
@@ -107,6 +108,14 @@ class TaskWorker(BaseWorker):
|
||||
for callback in body.get('errbacks', []) or []:
|
||||
callback['uuid'] = body['uuid']
|
||||
self.perform_work(callback)
|
||||
finally:
|
||||
# It's frustrating that we have to do this, but the python k8s
|
||||
# client leaves behind cacert files in /tmp, so we must clean up
|
||||
# the tmpdir per-dispatcher process every time a new task comes in
|
||||
try:
|
||||
kube_config._cleanup_temp_files()
|
||||
except Exception:
|
||||
logger.exception('failed to cleanup k8s client tmp files')
|
||||
|
||||
for callback in body.get('callbacks', []) or []:
|
||||
callback['uuid'] = body['uuid']
|
||||
|
||||
@@ -6,6 +6,7 @@ import stat
|
||||
import tempfile
|
||||
import time
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
from django.conf import settings
|
||||
import ansible_runner
|
||||
@@ -31,15 +32,14 @@ def set_pythonpath(venv_libdir, env):
|
||||
|
||||
class IsolatedManager(object):
|
||||
|
||||
def __init__(self, cancelled_callback=None, check_callback=None, pod_manager=None):
|
||||
def __init__(self, canceled_callback=None, check_callback=None, pod_manager=None):
|
||||
"""
|
||||
:param cancelled_callback: a callable - which returns `True` or `False`
|
||||
:param canceled_callback: a callable - which returns `True` or `False`
|
||||
- signifying if the job has been prematurely
|
||||
cancelled
|
||||
canceled
|
||||
"""
|
||||
self.cancelled_callback = cancelled_callback
|
||||
self.canceled_callback = canceled_callback
|
||||
self.check_callback = check_callback
|
||||
self.idle_timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
self.started_at = None
|
||||
self.captured_command_artifact = False
|
||||
self.instance = None
|
||||
@@ -48,10 +48,17 @@ class IsolatedManager(object):
|
||||
def build_inventory(self, hosts):
|
||||
if self.instance and self.instance.is_containerized:
|
||||
inventory = {'all': {'hosts': {}}}
|
||||
fd, path = tempfile.mkstemp(
|
||||
prefix='.kubeconfig', dir=self.private_data_dir
|
||||
)
|
||||
with open(path, 'wb') as temp:
|
||||
temp.write(yaml.dump(self.pod_manager.kube_config).encode())
|
||||
temp.flush()
|
||||
os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
for host in hosts:
|
||||
inventory['all']['hosts'][host] = {
|
||||
"ansible_connection": "kubectl",
|
||||
"ansible_kubectl_config": self.pod_manager.kube_config
|
||||
"ansible_kubectl_config": path,
|
||||
}
|
||||
else:
|
||||
inventory = '\n'.join([
|
||||
@@ -98,9 +105,8 @@ class IsolatedManager(object):
|
||||
'envvars': env,
|
||||
'finished_callback': finished_callback,
|
||||
'verbosity': verbosity,
|
||||
'cancel_callback': self.cancelled_callback,
|
||||
'cancel_callback': self.canceled_callback,
|
||||
'settings': {
|
||||
'idle_timeout': self.idle_timeout,
|
||||
'job_timeout': settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
|
||||
'pexpect_timeout': getattr(settings, 'PEXPECT_TIMEOUT', 5),
|
||||
'suppress_ansible_output': True,
|
||||
@@ -110,7 +116,7 @@ class IsolatedManager(object):
|
||||
def path_to(self, *args):
|
||||
return os.path.join(self.private_data_dir, *args)
|
||||
|
||||
def run_management_playbook(self, playbook, private_data_dir, **kw):
|
||||
def run_management_playbook(self, playbook, private_data_dir, idle_timeout=None, **kw):
|
||||
iso_dir = tempfile.mkdtemp(
|
||||
prefix=playbook,
|
||||
dir=private_data_dir
|
||||
@@ -118,6 +124,10 @@ class IsolatedManager(object):
|
||||
params = self.runner_params.copy()
|
||||
params['playbook'] = playbook
|
||||
params['private_data_dir'] = iso_dir
|
||||
if idle_timeout:
|
||||
params['settings']['idle_timeout'] = idle_timeout
|
||||
else:
|
||||
params['settings'].pop('idle_timeout', None)
|
||||
params.update(**kw)
|
||||
if all([
|
||||
getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
|
||||
@@ -143,6 +153,8 @@ class IsolatedManager(object):
|
||||
'- /artifacts/job_events/*-partial.json.tmp',
|
||||
# don't rsync the ssh_key FIFO
|
||||
'- /env/ssh_key',
|
||||
# don't rsync kube config files
|
||||
'- .kubeconfig*'
|
||||
]
|
||||
|
||||
for filename, data in (
|
||||
@@ -167,6 +179,7 @@ class IsolatedManager(object):
|
||||
logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
|
||||
runner_obj = self.run_management_playbook('run_isolated.yml',
|
||||
self.private_data_dir,
|
||||
idle_timeout=max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
|
||||
extravars=extravars)
|
||||
|
||||
if runner_obj.status == 'failed':
|
||||
@@ -198,14 +211,14 @@ class IsolatedManager(object):
|
||||
dispatcher = CallbackQueueDispatcher()
|
||||
|
||||
while status == 'failed':
|
||||
canceled = self.cancelled_callback() if self.cancelled_callback else False
|
||||
canceled = self.canceled_callback() if self.canceled_callback else False
|
||||
if not canceled and time.time() - last_check < interval:
|
||||
# If the job isn't cancelled, but we haven't waited `interval` seconds, wait longer
|
||||
# If the job isn't canceled, but we haven't waited `interval` seconds, wait longer
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
if canceled:
|
||||
logger.warning('Isolated job {} was manually cancelled.'.format(self.instance.id))
|
||||
logger.warning('Isolated job {} was manually canceled.'.format(self.instance.id))
|
||||
|
||||
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
|
||||
runner_obj = self.run_management_playbook('check_isolated.yml',
|
||||
|
||||
@@ -28,6 +28,7 @@ from awx.main.models.inventory import (
|
||||
Host
|
||||
)
|
||||
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
|
||||
from awx.main.utils.safe_yaml import sanitize_jinja
|
||||
|
||||
# other AWX imports
|
||||
from awx.main.models.rbac import batch_role_ancestor_rebuilding
|
||||
@@ -795,6 +796,10 @@ class Command(BaseCommand):
|
||||
if self.instance_id_var:
|
||||
instance_id = self._get_instance_id(mem_host.variables)
|
||||
host_attrs['instance_id'] = instance_id
|
||||
try:
|
||||
sanitize_jinja(mem_host_name)
|
||||
except ValueError as e:
|
||||
raise ValueError(str(e) + ': {}'.format(mem_host_name))
|
||||
db_host = self.inventory.hosts.update_or_create(name=mem_host_name, defaults=host_attrs)[0]
|
||||
if enabled is False:
|
||||
logger.debug('Host "%s" added (disabled)', mem_host_name)
|
||||
|
||||
awx/main/management/commands/regenerate_secret_key.py (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models.signals import post_save
|
||||
|
||||
from awx.conf import settings_registry
|
||||
from awx.conf.models import Setting
|
||||
from awx.conf.signals import on_post_save_setting
|
||||
from awx.main.models import (
|
||||
UnifiedJob, Credential, NotificationTemplate, Job, JobTemplate, WorkflowJob,
|
||||
WorkflowJobTemplate, OAuth2Application
|
||||
)
|
||||
from awx.main.utils.encryption import (
|
||||
encrypt_field, decrypt_field, encrypt_value, decrypt_value, get_encryption_key
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""
|
||||
Regenerate a new SECRET_KEY value and re-encrypt every secret in the
|
||||
Tower database.
|
||||
"""
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, **options):
|
||||
self.old_key = settings.SECRET_KEY
|
||||
self.new_key = base64.encodebytes(os.urandom(33)).decode().rstrip()
|
||||
self._notification_templates()
|
||||
self._credentials()
|
||||
self._unified_jobs()
|
||||
self._oauth2_app_secrets()
|
||||
self._settings()
|
||||
self._survey_passwords()
|
||||
return self.new_key
|
||||
|
||||
def _notification_templates(self):
|
||||
for nt in NotificationTemplate.objects.iterator():
|
||||
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NotificationTemplate.NOTIFICATION_TYPES])
|
||||
notification_class = CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type]
|
||||
for field in filter(lambda x: notification_class.init_parameters[x]['type'] == "password",
|
||||
notification_class.init_parameters):
|
||||
nt.notification_configuration[field] = decrypt_field(nt, 'notification_configuration', subfield=field, secret_key=self.old_key)
|
||||
nt.notification_configuration[field] = encrypt_field(nt, 'notification_configuration', subfield=field, secret_key=self.new_key)
|
||||
nt.save()
|
||||
|
||||
def _credentials(self):
|
||||
for credential in Credential.objects.iterator():
|
||||
for field_name in credential.credential_type.secret_fields:
|
||||
if field_name in credential.inputs:
|
||||
credential.inputs[field_name] = decrypt_field(
|
||||
credential,
|
||||
field_name,
|
||||
secret_key=self.old_key
|
||||
)
|
||||
credential.inputs[field_name] = encrypt_field(
|
||||
credential,
|
||||
field_name,
|
||||
secret_key=self.new_key
|
||||
)
|
||||
credential.save()
|
||||
|
||||
def _unified_jobs(self):
|
||||
for uj in UnifiedJob.objects.iterator():
|
||||
if uj.start_args:
|
||||
uj.start_args = decrypt_field(
|
||||
uj,
|
||||
'start_args',
|
||||
secret_key=self.old_key
|
||||
)
|
||||
uj.start_args = encrypt_field(uj, 'start_args', secret_key=self.new_key)
|
||||
uj.save()
|
||||
|
||||
def _oauth2_app_secrets(self):
|
||||
for app in OAuth2Application.objects.iterator():
|
||||
raw = app.client_secret
|
||||
app.client_secret = raw
|
||||
encrypted = encrypt_value(raw, secret_key=self.new_key)
|
||||
OAuth2Application.objects.filter(pk=app.pk).update(client_secret=encrypted)
|
||||
|
||||
def _settings(self):
|
||||
# don't update memcached, the *actual* value isn't changing
|
||||
post_save.disconnect(on_post_save_setting, sender=Setting)
|
||||
for setting in Setting.objects.filter().order_by('pk'):
|
||||
if settings_registry.is_setting_encrypted(setting.key):
|
||||
setting.value = decrypt_field(setting, 'value', secret_key=self.old_key)
|
||||
setting.value = encrypt_field(setting, 'value', secret_key=self.new_key)
|
||||
setting.save()
|
||||
|
||||
def _survey_passwords(self):
|
||||
for _type in (JobTemplate, WorkflowJobTemplate):
|
||||
for jt in _type.objects.exclude(survey_spec={}):
|
||||
changed = False
|
||||
if jt.survey_spec.get('spec', []):
|
||||
for field in jt.survey_spec['spec']:
|
||||
if field.get('type') == 'password' and field.get('default', ''):
|
||||
raw = decrypt_value(
|
||||
get_encryption_key('value', None, secret_key=self.old_key),
|
||||
field['default']
|
||||
)
|
||||
field['default'] = encrypt_value(
|
||||
raw,
|
||||
pk=None,
|
||||
secret_key=self.new_key
|
||||
)
|
||||
changed = True
|
||||
if changed:
|
||||
jt.save(update_fields=["survey_spec"])
|
||||
|
||||
for _type in (Job, WorkflowJob):
|
||||
for job in _type.objects.exclude(survey_passwords={}).iterator():
|
||||
changed = False
|
||||
for key in job.survey_passwords:
|
||||
if key in job.extra_vars:
|
||||
extra_vars = json.loads(job.extra_vars)
|
||||
if not extra_vars.get(key):
|
||||
continue
|
||||
raw = decrypt_value(
|
||||
get_encryption_key('value', None, secret_key=self.old_key),
|
||||
extra_vars[key]
|
||||
)
|
||||
extra_vars[key] = encrypt_value(raw, pk=None, secret_key=self.new_key)
|
||||
job.extra_vars = json.dumps(extra_vars)
|
||||
changed = True
|
||||
if changed:
|
||||
job.save(update_fields=["extra_vars"])
|
||||
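The command above follows one pattern for every secret it touches: decrypt with the old SECRET_KEY, re-encrypt with the new one, save. A self-contained sketch of that rotation, using cryptography.fernet purely as a stand-in for AWX's own field-level helpers (the key and token formats shown are not Tower's):

from cryptography.fernet import Fernet

old_key, new_key = Fernet.generate_key(), Fernet.generate_key()
stored = Fernet(old_key).encrypt(b'super-secret-password')   # existing ciphertext under the old key

plaintext = Fernet(old_key).decrypt(stored)                  # decrypt with the old key
rotated = Fernet(new_key).encrypt(plaintext)                 # re-encrypt with the new key

assert Fernet(new_key).decrypt(rotated) == b'super-secret-password'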
@@ -13,8 +13,7 @@ import urllib.parse
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models.signals import post_save
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.db import IntegrityError, connection
|
||||
from django.db import IntegrityError
|
||||
from django.utils.functional import curry
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.apps import apps
|
||||
@@ -24,6 +23,7 @@ from django.urls import reverse, resolve
|
||||
|
||||
from awx.main.models import ActivityStream
|
||||
from awx.main.utils.named_url_graph import generate_graph, GraphNode
|
||||
from awx.main.utils.db import migration_in_progress_check_or_relase
|
||||
from awx.conf import fields, register
|
||||
|
||||
|
||||
@@ -213,8 +213,7 @@ class URLModificationMiddleware(MiddlewareMixin):
|
||||
class MigrationRanCheckMiddleware(MiddlewareMixin):
|
||||
|
||||
def process_request(self, request):
|
||||
executor = MigrationExecutor(connection)
|
||||
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
|
||||
if bool(plan) and \
|
||||
getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
|
||||
if migration_in_progress_check_or_relase():
|
||||
if getattr(resolve(request.path), 'url_name', '') == 'migrations_notran':
|
||||
return
|
||||
return redirect(reverse("ui:migrations_notran"))
|
||||
|
||||
@@ -7,12 +7,6 @@ from django.db import migrations, models
|
||||
|
||||
# AWX
|
||||
from awx.main.migrations import ActivityStreamDisabledMigration
|
||||
from awx.main.migrations import _inventory_source as invsrc
|
||||
from awx.main.migrations import _migration_utils as migration_utils
|
||||
from awx.main.migrations import _reencrypt as reencrypt
|
||||
from awx.main.migrations import _scan_jobs as scan_jobs
|
||||
from awx.main.migrations import _credentialtypes as credentialtypes
|
||||
from awx.main.migrations import _azure_credentials as azurecreds
|
||||
import awx.main.fields
|
||||
|
||||
|
||||
@@ -23,16 +17,8 @@ class Migration(ActivityStreamDisabledMigration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
# Inventory Refresh
|
||||
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
|
||||
migrations.RunPython(invsrc.remove_rax_inventory_sources),
|
||||
migrations.RunPython(azurecreds.remove_azure_credentials),
|
||||
migrations.RunPython(invsrc.remove_azure_inventory_sources),
|
||||
migrations.RunPython(invsrc.remove_inventory_source_with_no_inventory_link),
|
||||
migrations.RunPython(invsrc.rename_inventory_sources),
|
||||
migrations.RunPython(reencrypt.replace_aesecb_fernet),
|
||||
migrations.RunPython(scan_jobs.migrate_scan_job_templates),
|
||||
|
||||
migrations.RunPython(credentialtypes.migrate_to_v2_credentials),
|
||||
migrations.RunPython(credentialtypes.migrate_job_credentials),
|
||||
# This list is intentionally empty.
|
||||
# Tower 3.2 included several data migrations that are no longer
|
||||
# necessary (this list is now empty because Tower 3.2 is past EOL and
|
||||
# cannot be directly upgraded to modern versions of Tower)
|
||||
]
|
||||
|
||||
@@ -15,8 +15,6 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
|
||||
migrations.RunPython(credentialtypes.create_rhv_tower_credtype),
|
||||
migrations.AlterField(
|
||||
model_name='inventorysource',
|
||||
name='source',
|
||||
|
||||
@@ -3,8 +3,6 @@ from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations
|
||||
from awx.main.migrations import ActivityStreamDisabledMigration
|
||||
from awx.main.migrations import _reencrypt as reencrypt
|
||||
from awx.main.migrations import _migration_utils as migration_utils
|
||||
|
||||
|
||||
class Migration(ActivityStreamDisabledMigration):
|
||||
@@ -14,6 +12,8 @@ class Migration(ActivityStreamDisabledMigration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
|
||||
migrations.RunPython(reencrypt.encrypt_survey_passwords),
|
||||
# This list is intentionally empty.
|
||||
# Tower 3.2 included several data migrations that are no longer
|
||||
# necessary (this list is now empty because Tower 3.2 is past EOL and
|
||||
# cannot be directly upgraded to modern versions of Tower)
|
||||
]
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# AWX
|
||||
from awx.main.migrations import _migration_utils as migration_utils
|
||||
from awx.main.migrations import _credentialtypes as credentialtypes
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
@@ -15,6 +11,8 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
|
||||
migrations.RunPython(credentialtypes.add_azure_cloud_environment_field),
|
||||
# This list is intentionally empty.
|
||||
# Tower 3.2 included several data migrations that are no longer
|
||||
# necessary (this list is now empty because Tower 3.2 is past EOL and
|
||||
# cannot be directly upgraded to modern versions of Tower)
|
||||
]
|
||||
|
||||
@@ -19,11 +19,11 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='systemjob',
|
||||
name='job_type',
|
||||
field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('clearsessions', 'Removes expired browser sessions from the database'), ('cleartokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
|
||||
field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_sessions', 'Removes expired browser sessions from the database'), ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemjobtemplate',
|
||||
name='job_type',
|
||||
field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('clearsessions', 'Removes expired browser sessions from the database'), ('cleartokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
|
||||
field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_sessions', 'Removes expired browser sessions from the database'), ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
|
||||
),
|
||||
]
|
||||
|
||||
awx/main/migrations/0099_v361_license_cleanup.py (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def _cleanup_license_setting(apps, schema_editor):
|
||||
Setting = apps.get_model('conf', 'Setting')
|
||||
for license in Setting.objects.filter(key='LICENSE').all():
|
||||
for k in ('rh_username', 'rh_password'):
|
||||
license.value.pop(k, None)
|
||||
license.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0098_v360_rename_cyberark_aim_credential_type'),
|
||||
]
|
||||
|
||||
operations = [migrations.RunPython(_cleanup_license_setting)]
|
||||
awx/main/migrations/0100_v370_projectupdate_job_tags.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.2.4 on 2019-11-01 18:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0099_v361_license_cleanup'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='projectupdate',
|
||||
name='job_tags',
|
||||
field=models.CharField(blank=True, default='', help_text='Parts of the project update playbook that will be run.', max_length=1024),
|
||||
),
|
||||
]
|
||||
@@ -1,15 +0,0 @@
|
||||
import logging
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
logger = logging.getLogger('awx.main.migrations')
|
||||
|
||||
|
||||
def remove_azure_credentials(apps, schema_editor):
|
||||
'''Azure is not supported as of 3.2 and greater. Instead, azure_rm is
|
||||
supported.
|
||||
'''
|
||||
Credential = apps.get_model('main', 'Credential')
|
||||
logger.debug("Removing all Azure Credentials from database.")
|
||||
Credential.objects.filter(kind='azure').delete()
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
from awx.main import utils
|
||||
from awx.main.models import CredentialType
|
||||
from awx.main.utils.encryption import encrypt_field, decrypt_field
|
||||
from django.db.models import Q
|
||||
|
||||
|
||||
@@ -61,16 +59,6 @@ def _disassociate_non_insights_projects(apps, cred):
|
||||
apps.get_model('main', 'Project').objects.filter(~Q(scm_type='insights') & Q(credential=cred)).update(credential=None)
|
||||
|
||||
|
||||
def migrate_to_v2_credentials(apps, schema_editor):
|
||||
# TODO: remove once legacy/EOL'd Towers no longer support this upgrade path
|
||||
pass
|
||||
|
||||
|
||||
def migrate_job_credentials(apps, schema_editor):
|
||||
# TODO: remove once legacy/EOL'd Towers no longer support this upgrade path
|
||||
pass
|
||||
|
||||
|
||||
def add_vault_id_field(apps, schema_editor):
|
||||
# this is no longer necessary; schemas are defined in code
|
||||
pass
|
||||
@@ -81,21 +69,11 @@ def remove_vault_id_field(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
def create_rhv_tower_credtype(apps, schema_editor):
|
||||
# this is no longer necessary; schemas are defined in code
|
||||
pass
|
||||
|
||||
|
||||
def add_tower_verify_field(apps, schema_editor):
|
||||
# this is no longer necessary; schemas are defined in code
|
||||
pass
|
||||
|
||||
|
||||
def add_azure_cloud_environment_field(apps, schema_editor):
|
||||
# this is no longer necessary; schemas are defined in code
|
||||
pass
|
||||
|
||||
|
||||
def remove_become_methods(apps, schema_editor):
|
||||
# this is no longer necessary; schemas are defined in code
|
||||
pass
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import logging
|
||||
|
||||
from django.db.models import Q
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.utils.common import parse_yaml_or_json
|
||||
@@ -8,64 +7,6 @@ from awx.main.utils.common import parse_yaml_or_json
|
||||
logger = logging.getLogger('awx.main.migrations')
|
||||
|
||||
|
||||
def remove_manual_inventory_sources(apps, schema_editor):
|
||||
'''Previously we would automatically create inventory sources after
|
||||
Group creation and we would use the parent Group as our interface for the user.
|
||||
During that process we would create InventorySource that had a source of "manual".
|
||||
'''
|
||||
# TODO: use this in the 3.3 data migrations
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
# see models/inventory.py SOURCE_CHOICES - ('', _('Manual'))
|
||||
logger.debug("Removing all Manual InventorySource from database.")
|
||||
InventorySource.objects.filter(source='').delete()
|
||||
|
||||
|
||||
def remove_rax_inventory_sources(apps, schema_editor):
|
||||
'''Rackspace inventory sources are not supported since 3.2, remove them.
|
||||
'''
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
logger.debug("Removing all Rackspace InventorySource from database.")
|
||||
InventorySource.objects.filter(source='rax').delete()
|
||||
|
||||
|
||||
def rename_inventory_sources(apps, schema_editor):
|
||||
'''Rename existing InventorySource entries using the following format.
|
||||
{{ inventory_source.name }} - {{ inventory.module }} - {{ number }}
|
||||
The number will be incremented for each InventorySource for the organization.
|
||||
'''
|
||||
Organization = apps.get_model('main', 'Organization')
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
|
||||
for org in Organization.objects.iterator():
|
||||
for i, invsrc in enumerate(InventorySource.objects.filter(Q(inventory__organization=org) |
|
||||
Q(deprecated_group__inventory__organization=org)).distinct().all()):
|
||||
|
||||
inventory = invsrc.deprecated_group.inventory if invsrc.deprecated_group else invsrc.inventory
|
||||
name = '{0} - {1} - {2}'.format(invsrc.name, inventory.name, i)
|
||||
logger.debug("Renaming InventorySource({0}) {1} -> {2}".format(
|
||||
invsrc.pk, invsrc.name, name
|
||||
))
|
||||
invsrc.name = name
|
||||
invsrc.save()
|
||||
|
||||
|
||||
def remove_inventory_source_with_no_inventory_link(apps, schema_editor):
|
||||
'''If we cannot determine the Inventory for which an InventorySource exists
|
||||
we can safely remove it.
|
||||
'''
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
logger.debug("Removing all InventorySource that have no link to an Inventory from database.")
|
||||
InventorySource.objects.filter(Q(inventory__organization=None) & Q(deprecated_group__inventory=None)).delete()
|
||||
|
||||
|
||||
def remove_azure_inventory_sources(apps, schema_editor):
|
||||
'''Azure inventory sources are not supported since 3.2, remove them.
|
||||
'''
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
logger.debug("Removing all Azure InventorySource from database.")
|
||||
InventorySource.objects.filter(source='azure').delete()
|
||||
|
||||
|
||||
def _get_instance_id(from_dict, new_id, default=''):
|
||||
'''logic mostly duplicated with inventory_import command Command._get_instance_id
|
||||
frozen in time here, for purposes of migrations
|
||||
|
||||
@@ -1,79 +1,12 @@
|
||||
import logging
|
||||
import json
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.conf.migrations._reencrypt import (
|
||||
decrypt_field,
|
||||
should_decrypt_field,
|
||||
)
|
||||
from awx.main.utils.encryption import encrypt_field
|
||||
|
||||
from awx.main.notifications.email_backend import CustomEmailBackend
|
||||
from awx.main.notifications.slack_backend import SlackBackend
|
||||
from awx.main.notifications.twilio_backend import TwilioBackend
|
||||
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
|
||||
from awx.main.notifications.hipchat_backend import HipChatBackend
|
||||
from awx.main.notifications.mattermost_backend import MattermostBackend
|
||||
from awx.main.notifications.webhook_backend import WebhookBackend
|
||||
from awx.main.notifications.irc_backend import IrcBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.migrations')
|
||||
|
||||
__all__ = ['replace_aesecb_fernet']
|
||||
|
||||
|
||||
NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
|
||||
('slack', _('Slack'), SlackBackend),
|
||||
('twilio', _('Twilio'), TwilioBackend),
|
||||
('pagerduty', _('Pagerduty'), PagerDutyBackend),
|
||||
('hipchat', _('HipChat'), HipChatBackend),
|
||||
('mattermost', _('Mattermost'), MattermostBackend),
|
||||
('webhook', _('Webhook'), WebhookBackend),
|
||||
('irc', _('IRC'), IrcBackend)]
|
||||
|
||||
|
||||
PASSWORD_FIELDS = ('password', 'security_token', 'ssh_key_data', 'ssh_key_unlock',
|
||||
'become_password', 'vault_password', 'secret', 'authorize_password')
|
||||
|
||||
|
||||
def replace_aesecb_fernet(apps, schema_editor):
|
||||
_notification_templates(apps)
|
||||
_credentials(apps)
|
||||
_unified_jobs(apps)
|
||||
|
||||
|
||||
def _notification_templates(apps):
|
||||
NotificationTemplate = apps.get_model('main', 'NotificationTemplate')
|
||||
for nt in NotificationTemplate.objects.all():
|
||||
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
|
||||
notification_class = CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type]
|
||||
for field in filter(lambda x: notification_class.init_parameters[x]['type'] == "password",
|
||||
notification_class.init_parameters):
|
||||
if should_decrypt_field(nt.notification_configuration[field]):
|
||||
nt.notification_configuration[field] = decrypt_field(nt, 'notification_configuration', subfield=field)
|
||||
nt.notification_configuration[field] = encrypt_field(nt, 'notification_configuration', subfield=field)
|
||||
nt.save()
|
||||
|
||||
|
||||
def _credentials(apps):
|
||||
for credential in apps.get_model('main', 'Credential').objects.all():
|
||||
for field_name in PASSWORD_FIELDS:
|
||||
value = getattr(credential, field_name)
|
||||
if should_decrypt_field(value):
|
||||
value = decrypt_field(credential, field_name)
|
||||
setattr(credential, field_name, value)
|
||||
setattr(credential, field_name, encrypt_field(credential, field_name))
|
||||
credential.save()
|
||||
|
||||
|
||||
def _unified_jobs(apps):
|
||||
UnifiedJob = apps.get_model('main', 'UnifiedJob')
|
||||
for uj in UnifiedJob.objects.all():
|
||||
if uj.start_args is not None:
|
||||
if should_decrypt_field(uj.start_args):
|
||||
uj.start_args = decrypt_field(uj, 'start_args')
|
||||
uj.start_args = encrypt_field(uj, 'start_args')
|
||||
uj.save()
|
||||
__all__ = []
|
||||
|
||||
|
||||
def blank_old_start_args(apps, schema_editor):
|
||||
@@ -91,53 +24,3 @@ def blank_old_start_args(apps, schema_editor):
|
||||
logger.debug('Blanking job args for %s', uj.pk)
|
||||
uj.start_args = ''
|
||||
uj.save()
|
||||
|
||||
|
||||
def encrypt_survey_passwords(apps, schema_editor):
|
||||
_encrypt_survey_passwords(
|
||||
apps.get_model('main', 'Job'),
|
||||
apps.get_model('main', 'JobTemplate'),
|
||||
apps.get_model('main', 'WorkflowJob'),
|
||||
apps.get_model('main', 'WorkflowJobTemplate'),
|
||||
)
|
||||
|
||||
|
||||
def _encrypt_survey_passwords(Job, JobTemplate, WorkflowJob, WorkflowJobTemplate):
|
||||
from awx.main.utils.encryption import encrypt_value
|
||||
for _type in (JobTemplate, WorkflowJobTemplate):
|
||||
for jt in _type.objects.exclude(survey_spec={}):
|
||||
changed = False
|
||||
if jt.survey_spec.get('spec', []):
|
||||
for field in jt.survey_spec['spec']:
|
||||
if field.get('type') == 'password' and field.get('default', ''):
|
||||
default = field['default']
|
||||
if default.startswith('$encrypted$'):
|
||||
if default == '$encrypted$':
|
||||
# If you have a survey_spec with a literal
|
||||
# '$encrypted$' as the default, you have
|
||||
# encountered a known bug in awx/Tower
|
||||
# https://github.com/ansible/ansible-tower/issues/7800
|
||||
logger.error(
|
||||
'{}.pk={} survey_spec has ambiguous $encrypted$ default for {}, needs attention...'.format(jt, jt.pk, field['variable'])
|
||||
)
|
||||
field['default'] = ''
|
||||
changed = True
|
||||
continue
|
||||
field['default'] = encrypt_value(field['default'], pk=None)
|
||||
changed = True
|
||||
if changed:
|
||||
jt.save()
|
||||
|
||||
for _type in (Job, WorkflowJob):
|
||||
for job in _type.objects.defer('result_stdout_text').exclude(survey_passwords={}).iterator():
|
||||
changed = False
|
||||
for key in job.survey_passwords:
|
||||
if key in job.extra_vars:
|
||||
extra_vars = json.loads(job.extra_vars)
|
||||
if not extra_vars.get(key, '') or extra_vars[key].startswith('$encrypted$'):
|
||||
continue
|
||||
extra_vars[key] = encrypt_value(extra_vars[key], pk=None)
|
||||
job.extra_vars = json.dumps(extra_vars)
|
||||
changed = True
|
||||
if changed:
|
||||
job.save()
|
||||
|
||||
@@ -1,89 +1,9 @@
|
||||
import logging
|
||||
|
||||
from django.utils.timezone import now
|
||||
from django.utils.text import slugify
|
||||
|
||||
from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
|
||||
from awx.main import utils
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.migrations')
|
||||
|
||||
|
||||
def _create_fact_scan_project(ContentType, Project, org):
|
||||
ct = ContentType.objects.get_for_model(Project)
|
||||
name = u"Tower Fact Scan - {}".format(org.name if org else "No Organization")
|
||||
proj = Project(name=name,
|
||||
scm_url='https://github.com/ansible/awx-facts-playbooks',
|
||||
scm_type='git',
|
||||
scm_update_on_launch=True,
|
||||
scm_update_cache_timeout=86400,
|
||||
organization=org,
|
||||
created=now(),
|
||||
modified=now(),
|
||||
polymorphic_ctype=ct)
|
||||
proj.save()
|
||||
|
||||
slug_name = slugify(str(name)).replace(u'-', u'_')
|
||||
proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)
|
||||
|
||||
proj.save()
|
||||
return proj
|
||||
|
||||
|
||||
def _create_fact_scan_projects(ContentType, Project, orgs):
|
||||
return {org.id : _create_fact_scan_project(ContentType, Project, org) for org in orgs}
|
||||
|
||||
|
||||
def _get_tower_scan_job_templates(JobTemplate):
|
||||
return JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=True) \
|
||||
.prefetch_related('inventory__organization')
|
||||
|
||||
|
||||
def _get_orgs(Organization, job_template_ids):
|
||||
return Organization.objects.filter(inventories__jobtemplates__in=job_template_ids).distinct()
|
||||
|
||||
|
||||
def _migrate_scan_job_templates(apps):
|
||||
JobTemplate = apps.get_model('main', 'JobTemplate')
|
||||
Organization = apps.get_model('main', 'Organization')
|
||||
ContentType = apps.get_model('contenttypes', 'ContentType')
|
||||
Project = apps.get_model('main', 'Project')
|
||||
|
||||
project_no_org = None
|
||||
|
||||
# A scan job template with a custom project will retain the custom project.
|
||||
JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=False).update(use_fact_cache=True, job_type=PERM_INVENTORY_DEPLOY)
|
||||
|
||||
# Scan jobs templates using Tower's default scan playbook will now point at
|
||||
# the same playbook but in a github repo.
|
||||
jts = _get_tower_scan_job_templates(JobTemplate)
|
||||
if jts.count() == 0:
|
||||
return
|
||||
|
||||
orgs = _get_orgs(Organization, jts.values_list('id'))
|
||||
if orgs.count() == 0:
|
||||
return
|
||||
|
||||
org_proj_map = _create_fact_scan_projects(ContentType, Project, orgs)
|
||||
for jt in jts:
|
||||
if jt.inventory and jt.inventory.organization:
|
||||
jt.project_id = org_proj_map[jt.inventory.organization.id].id
|
||||
# Job Templates without an Organization; through related Inventory
|
||||
else:
|
||||
if not project_no_org:
|
||||
project_no_org = _create_fact_scan_project(ContentType, Project, None)
|
||||
jt.project_id = project_no_org.id
|
||||
jt.job_type = PERM_INVENTORY_DEPLOY
|
||||
jt.playbook = "scan_facts.yml"
|
||||
jt.use_fact_cache = True
|
||||
jt.save()
|
||||
|
||||
|
||||
def migrate_scan_job_templates(apps, schema_editor):
|
||||
_migrate_scan_job_templates(apps)
|
||||
|
||||
|
||||
def remove_scan_type_nodes(apps, schema_editor):
|
||||
WorkflowJobTemplateNode = apps.get_model('main', 'WorkflowJobTemplateNode')
|
||||
WorkflowJobNode = apps.get_model('main', 'WorkflowJobNode')
|
||||
|
||||
@@ -295,7 +295,10 @@ class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
r = super(PrimordialModel, self).__init__(*args, **kwargs)
|
||||
self._prior_values_store = self._get_fields_snapshot()
|
||||
if self.pk:
|
||||
self._prior_values_store = self._get_fields_snapshot()
|
||||
else:
|
||||
self._prior_values_store = {}
|
||||
return r
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
@@ -86,6 +86,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        unique_together = (('organization', 'name', 'credential_type'))

    PASSWORD_FIELDS = ['inputs']
    FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']

    credential_type = models.ForeignKey(
        'CredentialType',
@@ -1162,6 +1163,8 @@ class CredentialInputSource(PrimordialModel):
        unique_together = (('target_credential', 'input_field_name'),)
        ordering = ('target_credential', 'source_credential', 'input_field_name',)

    FIELDS_TO_PRESERVE_AT_COPY = ['source_credential', 'metadata', 'input_field_name']

    target_credential = models.ForeignKey(
        'Credential',
        related_name='input_sources',

@@ -270,6 +270,11 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
                           .filter(capacity__gt=0, enabled=True)
                           .values_list('hostname', flat=True)))

    def set_default_policy_fields(self):
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0


class TowerScheduleState(SingletonModel):
    schedule_last_run = models.DateTimeField(auto_now_add=True)
@@ -289,6 +294,8 @@ def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs
    if created or instance.has_policy_changes():
        if not instance.is_containerized:
            schedule_policy_task()
    elif created or instance.is_containerized:
        instance.set_default_policy_fields()


@receiver(post_save, sender=Instance)

@@ -61,6 +61,7 @@ from awx.main.models.notifications import (
)
from awx.main.models.credential.injectors import _openstack_data
from awx.main.utils import _inventory_updates, region_sorting, get_licenser
from awx.main.utils.safe_yaml import sanitize_jinja


__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate',
@@ -754,6 +755,13 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
            update_host_smart_inventory_memberships.delay()
        connection.on_commit(on_commit)

    def clean_name(self):
        try:
            sanitize_jinja(self.name)
        except ValueError as e:
            raise ValidationError(str(e) + ": {}".format(self.name))
        return self.name

    def save(self, *args, **kwargs):
        self._update_host_smart_inventory_memeberships()
        super(Host, self).save(*args, **kwargs)
@@ -2036,9 +2044,25 @@ class azure_rm(PluginFileInjector):
        for key, loc in old_filterables:
            value = source_vars.get(key, None)
            if value and isinstance(value, str):
                user_filters.append('{} not in {}'.format(
                    loc, value.split(',')
                ))
                # tags can be a list of key:value pairs
                # e.g. 'Creator:jmarshall, peanutbutter:jelly'
                # or tags can be a list of keys
                # e.g. 'Creator, peanutbutter'
                if key == "tags":
                    # grab each key:value pair
                    for kvpair in value.split(','):
                        # split into key and value
                        kv = kvpair.split(':')
                        # filter out any host that does not have the key
                        # in its tags.keys() variable
                        user_filters.append('"{}" not in tags.keys()'.format(kv[0].strip()))
                        # if a value is provided, check that the key:value pair matches
                        if len(kv) > 1:
                            user_filters.append('tags["{}"] != "{}"'.format(kv[0].strip(), kv[1].strip()))
                else:
                    user_filters.append('{} not in {}'.format(
                        loc, value.split(',')
                    ))
        if user_filters:
            ret.setdefault('exclude_host_filters', [])
            ret['exclude_host_filters'].extend(user_filters)

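A quick illustrative sketch (not part of the diff) of what the tags handling above produces; the input string below is a made-up example.

tags_value = 'Creator:jmarshall, peanutbutter:jelly'   # hypothetical source_vars value
user_filters = []
for kvpair in tags_value.split(','):
    kv = kvpair.split(':')
    user_filters.append('"{}" not in tags.keys()'.format(kv[0].strip()))
    if len(kv) > 1:
        user_filters.append('tags["{}"] != "{}"'.format(kv[0].strip(), kv[1].strip()))
# user_filters is now:
# ['"Creator" not in tags.keys()', 'tags["Creator"] != "jmarshall"',
#  '"peanutbutter" not in tags.keys()', 'tags["peanutbutter"] != "jelly"']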
@@ -634,7 +634,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
        else:
            # If for some reason we can't count the hosts, assume the impact equals the number of forks
            if self.inventory is not None:
                count_hosts = self.inventory.hosts.count()
                count_hosts = self.inventory.total_hosts
                if self.job_slice_count > 1:
                    # Integer division intentional
                    count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
@@ -900,6 +900,9 @@ class LaunchTimeConfigBase(BaseModel):
                data[prompt_name] = self.display_extra_vars()
            else:
                data[prompt_name] = self.extra_vars
            # Depending on the model, the field may be saved and returned as a string
            if isinstance(data[prompt_name], str):
                data[prompt_name] = parse_yaml_or_json(data[prompt_name])
            if self.survey_passwords and not display:
                data['survey_passwords'] = self.survey_passwords
        else:

@@ -1103,8 +1106,8 @@ class SystemJobOptions(BaseModel):
    SYSTEM_JOB_TYPE = [
        ('cleanup_jobs', _('Remove jobs older than a certain number of days')),
        ('cleanup_activitystream', _('Remove activity stream entries older than a certain number of days')),
        ('clearsessions', _('Removes expired browser sessions from the database')),
        ('cleartokens', _('Removes expired OAuth 2 access tokens and refresh tokens'))
        ('cleanup_sessions', _('Removes expired browser sessions from the database')),
        ('cleanup_tokens', _('Removes expired OAuth 2 access tokens and refresh tokens'))
    ]

    class Meta:
@@ -1179,18 +1182,19 @@ class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
        for key in unallowed_vars:
            rejected[key] = data.pop(key)

        if 'days' in data:
            try:
                if type(data['days']) is bool:
                    raise ValueError
                if float(data['days']) != int(data['days']):
                    raise ValueError
                days = int(data['days'])
                if days < 0:
                    raise ValueError
            except ValueError:
                errors_list.append(_("days must be a positive integer."))
                rejected['days'] = data.pop('days')
        if self.job_type in ('cleanup_jobs', 'cleanup_activitystream'):
            if 'days' in data:
                try:
                    if isinstance(data['days'], (bool, type(None))):
                        raise ValueError
                    if float(data['days']) != int(data['days']):
                        raise ValueError
                    days = int(data['days'])
                    if days < 0:
                        raise ValueError
                except ValueError:
                    errors_list.append(_("days must be a positive integer."))
                    rejected['days'] = data.pop('days')

        if errors_list:
            errors['extra_vars'] = errors_list

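A minimal sketch (illustrative only, not part of the diff) of the 'days' validation rule shown above: booleans, None, non-integers, and negatives are all rejected.

def days_is_valid(value):
    try:
        if isinstance(value, (bool, type(None))):
            return False
        if float(value) != int(value):
            return False
        return int(value) >= 0
    except (ValueError, TypeError):
        return False

assert days_is_valid(30) and days_is_valid("7")
assert not days_is_valid(True) and not days_is_valid(-1) and not days_is_valid(2.5)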
@@ -73,7 +73,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
    notification_configuration = prevent_search(JSONField(blank=False))

    def default_messages():
        return {'started': None, 'success': None, 'error': None}
        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

    messages = JSONField(
        null=True,
@@ -92,25 +92,6 @@ class NotificationTemplate(CommonModelNameNotUnique):
    def get_message(self, condition):
        return self.messages.get(condition, {})

    def build_notification_message(self, event_type, context):
        env = sandbox.ImmutableSandboxedEnvironment()
        templates = self.get_message(event_type)
        msg_template = templates.get('message', {})

        try:
            notification_subject = env.from_string(msg_template).render(**context)
        except (TemplateSyntaxError, UndefinedError, SecurityError):
            notification_subject = ''

        msg_body = templates.get('body', {})
        try:
            notification_body = env.from_string(msg_body).render(**context)
        except (TemplateSyntaxError, UndefinedError, SecurityError):
            notification_body = ''

        return (notification_subject, notification_body)

    def get_absolute_url(self, request=None):
        return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)

@@ -128,19 +109,34 @@ class NotificationTemplate(CommonModelNameNotUnique):
            old_messages = old_nt.messages
            new_messages = self.messages

            def merge_messages(local_old_messages, local_new_messages, local_event):
                if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
                    local_old_event_msgs = local_old_messages[local_event]
                    local_new_event_msgs = local_new_messages[local_event]
                    for msg_type in ['message', 'body']:
                        if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
                            local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
            if old_messages is not None and new_messages is not None:
                for event in ['started', 'success', 'error']:
                for event in ('started', 'success', 'error', 'workflow_approval'):
                    if not new_messages.get(event, {}) and old_messages.get(event, {}):
                        new_messages[event] = old_messages[event]
                        continue
                    if new_messages.get(event, {}) and old_messages.get(event, {}):
                        old_event_msgs = old_messages[event]
                        new_event_msgs = new_messages[event]
                        for msg_type in ['message', 'body']:
                            if msg_type not in new_event_msgs and old_event_msgs.get(msg_type, None):
                                new_event_msgs[msg_type] = old_event_msgs[msg_type]

                    if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
                        new_messages.setdefault('workflow_approval', {})
                        for subevent in ('running', 'approved', 'timed_out', 'denied'):
                            old_wfa_messages = old_messages['workflow_approval']
                            new_wfa_messages = new_messages['workflow_approval']
                            if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
                                new_wfa_messages[subevent] = old_wfa_messages[subevent]
                                continue
                            if old_wfa_messages:
                                merge_messages(old_wfa_messages, new_wfa_messages, subevent)
                    else:
                        merge_messages(old_messages, new_messages, event)
                    new_messages.setdefault(event, None)


        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if self.notification_configuration[field].startswith("$encrypted$"):
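An illustrative sketch (not part of the diff) of the merge behaviour above: message fields missing from the newly submitted dict are backfilled from the stored one. The sample dicts are made up.

old = {'started': {'message': 'old subject', 'body': 'old body'}}
new = {'started': {'message': 'new subject'}}
for msg_type in ['message', 'body']:
    if msg_type not in new['started'] and old['started'].get(msg_type):
        new['started'][msg_type] = old['started'][msg_type]
# new == {'started': {'message': 'new subject', 'body': 'old body'}}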
@@ -169,12 +165,12 @@ class NotificationTemplate(CommonModelNameNotUnique):
    def recipients(self):
        return self.notification_configuration[self.notification_class.recipient_parameter]

    def generate_notification(self, subject, message):
    def generate_notification(self, msg, body):
        notification = Notification(notification_template=self,
                                    notification_type=self.notification_type,
                                    recipients=smart_str(self.recipients),
                                    subject=subject,
                                    body=message)
                                    subject=msg,
                                    body=body)
        notification.save()
        return notification

@@ -273,6 +269,7 @@ class JobNotificationMixin(object):
        'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
        'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
        'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
        'approval_status', 'approval_node_name', 'workflow_url',
        {'host_status_counts': ['skipped', 'ok', 'changed', 'failures', 'dark']},
        {'playbook_counts': ['play_count', 'task_count']},
        {'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
@@ -370,7 +367,10 @@ class JobNotificationMixin(object):
                              'verbosity': 0},
              'job_friendly_name': 'Job',
              'url': 'https://towerhost/#/jobs/playbook/1010',
              'job_summary_dict': """{'url': 'https://towerhost/$/jobs/playbook/13',
              'approval_status': 'approved',
              'approval_node_name': 'Approve Me',
              'workflow_url': 'https://towerhost/#/workflows/1010',
              'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
                                  'traceback': '',
                                  'status': 'running',
                                  'started': '2019-08-07T21:46:38.362630+00:00',

@@ -389,14 +389,14 @@ class JobNotificationMixin(object):
        return context

    def context(self, serialized_job):
        """Returns a context that can be used for rendering notification messages.
        Context contains whitelisted content retrieved from a serialized job object
        """Returns a dictionary that can be used for rendering notification messages.
        The context will contain whitelisted content retrieved from a serialized job object
        (see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
        and a url to the job run."""
        context = {'job': {},
                   'job_friendly_name': self.get_notification_friendly_name(),
                   'url': self.get_ui_url(),
                   'job_summary_dict': json.dumps(self.notification_data(), indent=4)}
                   'job_metadata': json.dumps(self.notification_data(), indent=4)}

        def build_context(node, fields, whitelisted_fields):
            for safe_field in whitelisted_fields:

@@ -434,32 +434,33 @@ class JobNotificationMixin(object):
        context = self.context(job_serialization)

        msg_template = body_template = None
        msg = body = ''

        # Use custom template if available
        if nt.messages:
            templates = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
            msg_template = templates.get('message', {})
            body_template = templates.get('body', {})
            template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
            msg_template = template.get('message', None)
            body_template = template.get('body', None)
        # If custom template not provided, look up default template
        default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
        if not msg_template:
            msg_template = default_template.get('message', None)
        if not body_template:
            body_template = default_template.get('body', None)

        if msg_template:
            try:
                notification_subject = env.from_string(msg_template).render(**context)
                msg = env.from_string(msg_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                notification_subject = ''
        else:
            notification_subject = u"{} #{} '{}' {}: {}".format(self.get_notification_friendly_name(),
                                                                self.id,
                                                                self.name,
                                                                status,
                                                                self.get_ui_url())
        notification_body = self.notification_data()
        notification_body['friendly_name'] = self.get_notification_friendly_name()
                msg = ''

        if body_template:
            try:
                notification_body['body'] = env.from_string(body_template).render(**context)
                body = env.from_string(body_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                notification_body['body'] = ''
                body = ''

        return (notification_subject, notification_body)
        return (msg, body)

    def send_notification_templates(self, status):
        from awx.main.tasks import send_notifications  # avoid circular import
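A small sketch (not part of the diff) of the select-then-render pattern above, using jinja2's sandboxed environment; the template string and context values are made up.

from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

env = sandbox.ImmutableSandboxedEnvironment()
custom = None  # e.g. no custom message configured on the notification template
default = "{{ job_friendly_name }} #{{ job.id }} {{ job.status }}: {{ url }}"
template = custom or default
try:
    msg = env.from_string(template).render(job_friendly_name='Job',
                                           job={'id': 13, 'status': 'successful'},
                                           url='https://towerhost/#/jobs/13')
except (TemplateSyntaxError, UndefinedError, SecurityError):
    msg = ''
# msg == 'Job #13 successful: https://towerhost/#/jobs/13'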
@@ -475,16 +476,13 @@ class JobNotificationMixin(object):
            return

        for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
            try:
                (notification_subject, notification_body) = self.build_notification_message(nt, status)
            except AttributeError:
                raise NotImplementedError("build_notification_message() does not exist" % status)
            (msg, body) = self.build_notification_message(nt, status)

            # Use kwargs to force late-binding
            # https://stackoverflow.com/a/3431699/10669572
            def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
            def send_it(local_nt=nt, local_msg=msg, local_body=body):
                def _func():
                    send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
                    send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
                                             job_id=self.id)
                return _func
            connection.on_commit(send_it())

@@ -1,4 +1,5 @@
# Python
import logging
import re

# Django
@@ -22,6 +23,9 @@ DATA_URI_RE = re.compile(r'.*')  # FIXME
__all__ = ['OAuth2AccessToken', 'OAuth2Application']


logger = logging.getLogger('awx.main.models.oauth')


class OAuth2Application(AbstractApplication):

    class Meta:
@@ -120,15 +124,27 @@ class OAuth2AccessToken(AbstractAccessToken):
    def is_valid(self, scopes=None):
        valid = super(OAuth2AccessToken, self).is_valid(scopes)
        if valid:
            try:
                self.validate_external_users()
            except oauth2.AccessDeniedError:
                logger.exception(f'Failed to authenticate {self.user.username}')
                return False
            self.last_used = now()
            connection.on_commit(lambda: self.save(update_fields=['last_used']))

            def _update_last_used():
                if OAuth2AccessToken.objects.filter(pk=self.pk).exists():
                    self.save(update_fields=['last_used'])
            connection.on_commit(_update_last_used)
        return valid

    def save(self, *args, **kwargs):
    def validate_external_users(self):
        if self.user and settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS is False:
            external_account = get_external_account(self.user)
            if external_account is not None:
                raise oauth2.AccessDeniedError(_(
                    'OAuth2 Tokens cannot be created by users associated with an external authentication provider ({})'
                ).format(external_account))

    def save(self, *args, **kwargs):
        self.validate_external_users()
        super(OAuth2AccessToken, self).save(*args, **kwargs)

@@ -483,6 +483,12 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
        choices=PROJECT_UPDATE_JOB_TYPE_CHOICES,
        default='check',
    )
    job_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        help_text=_('Parts of the project update playbook that will be run.'),
    )
    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
@@ -587,3 +593,24 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
        if not selected_groups:
            return self.global_instance_groups
        return selected_groups

    def save(self, *args, **kwargs):
        added_update_fields = []
        if not self.job_tags:
            job_tags = ['update_{}'.format(self.scm_type)]
            if self.job_type == 'run':
                job_tags.append('install_roles')
                job_tags.append('install_collections')
            self.job_tags = ','.join(job_tags)
            added_update_fields.append('job_tags')
        if self.scm_delete_on_update and 'delete' not in self.job_tags and self.job_type == 'check':
            self.job_tags = ','.join([self.job_tags, 'delete'])
            added_update_fields.append('job_tags')
        elif (not self.scm_delete_on_update) and 'delete' in self.job_tags:
            job_tags = self.job_tags.split(',')
            job_tags.remove('delete')
            self.job_tags = ','.join(job_tags)
            added_update_fields.append('job_tags')
        if 'update_fields' in kwargs:
            kwargs['update_fields'].extend(added_update_fields)
        return super(ProjectUpdate, self).save(*args, **kwargs)

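For reference, a sketch (not part of the diff) of the default tag strings the save() above would produce for a couple of assumed inputs:

def default_job_tags(scm_type, job_type):
    tags = ['update_{}'.format(scm_type)]
    if job_type == 'run':
        tags += ['install_roles', 'install_collections']
    return ','.join(tags)

assert default_job_tags('git', 'check') == 'update_git'
assert default_job_tags('git', 'run') == 'update_git,install_roles,install_collections'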
@@ -2,6 +2,7 @@
# All Rights Reserved.

# Python
import json
import logging
from copy import copy
from urllib.parse import urljoin
@@ -16,6 +17,9 @@ from django.core.exceptions import ObjectDoesNotExist
# Django-CRUM
from crum import get_current_user

from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

# AWX
from awx.api.versioning import reverse
from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,

@@ -763,22 +767,45 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
            connection.on_commit(send_it())

    def build_approval_notification_message(self, nt, approval_status):
        subject = []
        workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
        subject.append(('The approval node "{}"').format(self.workflow_approval_template.name))
        if approval_status == 'running':
            subject.append(('needs review. This node can be viewed at: {}').format(workflow_url))
        if approval_status == 'approved':
            subject.append(('was approved. {}').format(workflow_url))
        if approval_status == 'timed_out':
            subject.append(('has timed out. {}').format(workflow_url))
        elif approval_status == 'denied':
            subject.append(('was denied. {}').format(workflow_url))
        subject = " ".join(subject)
        body = self.notification_data()
        body['body'] = subject
        env = sandbox.ImmutableSandboxedEnvironment()

        return subject, body
        context = self.context(approval_status)

        msg_template = body_template = None
        msg = body = ''

        # Use custom template if available
        if nt.messages and nt.messages.get('workflow_approval', None):
            template = nt.messages['workflow_approval'].get(approval_status, {})
            msg_template = template.get('message', None)
            body_template = template.get('body', None)
        # If custom template not provided, look up default template
        default_template = nt.notification_class.default_messages['workflow_approval'][approval_status]
        if not msg_template:
            msg_template = default_template.get('message', None)
        if not body_template:
            body_template = default_template.get('body', None)

        if msg_template:
            try:
                msg = env.from_string(msg_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                msg = ''

        if body_template:
            try:
                body = env.from_string(body_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                body = ''

        return (msg, body)

    def context(self, approval_status):
        workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
        return {'approval_status': approval_status,
                'approval_node_name': self.workflow_approval_template.name,
                'workflow_url': workflow_url,
                'job_metadata': json.dumps(self.notification_data(), indent=4)}

    @property
    def workflow_job_template(self):

@@ -1,21 +1,10 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

import json

from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
from django.utils.translation import ugettext_lazy as _


class AWXBaseEmailBackend(BaseEmailBackend):

    def format_body(self, body):
        if "body" in body:
            body_actual = body['body']
        else:
            body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
                body['friendly_name'], body['id'], body['status'], body['url'])
            )
            body_actual += json.dumps(body, indent=4)
        return body_actual
        return body

awx/main/notifications/custom_notification_base.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved.


class CustomNotificationBase(object):
    DEFAULT_MSG = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    DEFAULT_BODY = "{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_metadata }}"

    DEFAULT_APPROVAL_RUNNING_MSG = 'The approval node "{{ approval_node_name }}" needs review. This node can be viewed at: {{ workflow_url }}'
    DEFAULT_APPROVAL_RUNNING_BODY = ('The approval node "{{ approval_node_name }}" needs review. '
                                     'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}')

    DEFAULT_APPROVAL_APPROVED_MSG = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}'
    DEFAULT_APPROVAL_APPROVED_BODY = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}\n\n{{ job_metadata }}'

    DEFAULT_APPROVAL_TIMEOUT_MSG = 'The approval node "{{ approval_node_name }}" has timed out. {{ workflow_url }}'
    DEFAULT_APPROVAL_TIMEOUT_BODY = 'The approval node "{{ approval_node_name }}" has timed out. {{ workflow_url }}\n\n{{ job_metadata }}'

    DEFAULT_APPROVAL_DENIED_MSG = 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}'
    DEFAULT_APPROVAL_DENIED_BODY = 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}\n\n{{ job_metadata }}'


    default_messages = {"started": {"message": DEFAULT_MSG, "body": None},
                        "success": {"message": DEFAULT_MSG, "body": None},
                        "error": {"message": DEFAULT_MSG, "body": None},
                        "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": None},
                                              "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": None},
                                              "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": None},
                                              "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None}}}
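An illustrative lookup and render (not part of the diff) showing how a backend would pull one of these defaults; the rendering context values are made up.

template = CustomNotificationBase.default_messages['workflow_approval']['denied']['message']
# -> 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}'

from jinja2 import sandbox

env = sandbox.ImmutableSandboxedEnvironment()
text = env.from_string(template).render(approval_node_name='Approve Me',
                                        workflow_url='https://towerhost/#/workflows/1010')
# 'The approval node "Approve Me" was denied. https://towerhost/#/workflows/1010'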
@@ -1,14 +1,27 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

import json

from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.custom_notification_base import CustomNotificationBase

DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY

DEFAULT_APPROVAL_RUNNING_MSG = CustomNotificationBase.DEFAULT_APPROVAL_RUNNING_MSG
DEFAULT_APPROVAL_RUNNING_BODY = CustomNotificationBase.DEFAULT_APPROVAL_RUNNING_BODY

DEFAULT_APPROVAL_APPROVED_MSG = CustomNotificationBase.DEFAULT_APPROVAL_APPROVED_MSG
DEFAULT_APPROVAL_APPROVED_BODY = CustomNotificationBase.DEFAULT_APPROVAL_APPROVED_BODY

DEFAULT_APPROVAL_TIMEOUT_MSG = CustomNotificationBase.DEFAULT_APPROVAL_TIMEOUT_MSG
DEFAULT_APPROVAL_TIMEOUT_BODY = CustomNotificationBase.DEFAULT_APPROVAL_TIMEOUT_BODY

DEFAULT_APPROVAL_DENIED_MSG = CustomNotificationBase.DEFAULT_APPROVAL_DENIED_MSG
DEFAULT_APPROVAL_DENIED_BODY = CustomNotificationBase.DEFAULT_APPROVAL_DENIED_BODY


class CustomEmailBackend(EmailBackend):
class CustomEmailBackend(EmailBackend, CustomNotificationBase):

    init_parameters = {"host": {"label": "Host", "type": "string"},
                       "port": {"label": "Port", "type": "int"},
@@ -19,22 +32,17 @@ class CustomEmailBackend(EmailBackend):
                       "sender": {"label": "Sender Email", "type": "string"},
                       "recipients": {"label": "Recipient List", "type": "list"},
                       "timeout": {"label": "Timeout", "type": "int", "default": 30}}

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    DEFAULT_BODY = smart_text(_("{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_summary_dict }}"))
    default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
                        "success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
                        "error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
    recipient_parameter = "recipients"
    sender_parameter = "sender"

    default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
                                              "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": DEFAULT_APPROVAL_APPROVED_BODY},
                                              "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
                                              "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY}}}

    def format_body(self, body):
        if "body" in body:
            body_actual = body['body']
        else:
            body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
                body['friendly_name'], body['id'], body['status'], body['url'])
            )
            body_actual += json.dumps(body, indent=4)
        return body_actual
        # leave body unchanged (expect a string)
        return body

@@ -8,24 +8,21 @@ import dateutil.parser as dp

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase


logger = logging.getLogger('awx.main.notifications.grafana_backend')


class GrafanaBackend(AWXBaseEmailBackend):
class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
                       "grafana_key": {"label": "Grafana API Key", "type": "password"}}
    recipient_parameter = "grafana_url"
    sender_parameter = None

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
                 fail_silently=False, **kwargs):
        super(GrafanaBackend, self).__init__(fail_silently=fail_silently)

@@ -7,12 +7,14 @@ import requests

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.hipchat_backend')


class HipChatBackend(AWXBaseEmailBackend):
class HipChatBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"token": {"label": "Token", "type": "password"},
                       "rooms": {"label": "Destination Rooms", "type": "list"},
@@ -23,11 +25,6 @@ class HipChatBackend(AWXBaseEmailBackend):
    recipient_parameter = "rooms"
    sender_parameter = "message_from"

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
        super(HipChatBackend, self).__init__(fail_silently=fail_silently)
        self.token = token

@@ -9,12 +9,14 @@ import irc.client

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.irc_backend')


class IrcBackend(AWXBaseEmailBackend):
class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
                       "port": {"label": "IRC Server Port", "type": "int"},
@@ -25,11 +27,6 @@ class IrcBackend(AWXBaseEmailBackend):
    recipient_parameter = "targets"
    sender_parameter = None

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
        super(IrcBackend, self).__init__(fail_silently=fail_silently)
        self.server = server

@@ -7,23 +7,20 @@ import json

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.mattermost_backend')


class MattermostBackend(AWXBaseEmailBackend):
class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"},
                       "mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
    recipient_parameter = "mattermost_url"
    sender_parameter = None

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None,
                 mattermost_icon_url=None, fail_silently=False, **kwargs):
        super(MattermostBackend, self).__init__(fail_silently=fail_silently)

@@ -1,17 +1,23 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

import json
import logging
import pygerduty

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG

logger = logging.getLogger('awx.main.notifications.pagerduty_backend')


class PagerDutyBackend(AWXBaseEmailBackend):
class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
                       "token": {"label": "API Token", "type": "password"},
@@ -20,11 +26,14 @@ class PagerDutyBackend(AWXBaseEmailBackend):
    recipient_parameter = "service_key"
    sender_parameter = "client_name"

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    DEFAULT_BODY = "{{ job_summary_dict }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
                        "success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
                        "error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
    DEFAULT_BODY = "{{ job_metadata }}"
    default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                        "workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                                              "approved": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                                              "timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
                                              "denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}

    def __init__(self, subdomain, token, fail_silently=False, **kwargs):
        super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
@@ -32,6 +41,16 @@ class PagerDutyBackend(AWXBaseEmailBackend):
        self.token = token

    def format_body(self, body):
        # cast to dict if possible  # TODO: is it true that this can be a dict or str?
        try:
            potential_body = json.loads(body)
            if isinstance(potential_body, dict):
                body = potential_body
        except json.JSONDecodeError:
            pass

        # but it's okay if this is also just a string

        return body

    def send_messages(self, messages):

@@ -7,22 +7,20 @@ import json

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.rocketchat_backend')


class RocketChatBackend(AWXBaseEmailBackend):
class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"},
                       "rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
    recipient_parameter = "rocketchat_url"
    sender_parameter = None

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, rocketchat_no_verify_ssl=False, rocketchat_username=None, rocketchat_icon_url=None, fail_silently=False, **kwargs):
        super(RocketChatBackend, self).__init__(fail_silently=fail_silently)

@@ -6,24 +6,21 @@ from slackclient import SlackClient

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.slack_backend')
WEBSOCKET_TIMEOUT = 30


class SlackBackend(AWXBaseEmailBackend):
class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"token": {"label": "Token", "type": "password"},
                       "channels": {"label": "Destination Channels", "type": "list"}}
    recipient_parameter = "channels"
    sender_parameter = None

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, token, hex_color="", fail_silently=False, **kwargs):
        super(SlackBackend, self).__init__(fail_silently=fail_silently)
        self.token = token
@@ -50,6 +47,7 @@ class SlackBackend(AWXBaseEmailBackend):
        else:
            ret = connection.api_call("chat.postMessage",
                                      channel=r,
                                      as_user=True,
                                      text=m.subject)
        logger.debug(ret)
        if ret['ok']:

@@ -7,12 +7,14 @@ from twilio.rest import Client

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.twilio_backend')


class TwilioBackend(AWXBaseEmailBackend):
class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
                       "account_token": {"label": "Account Token", "type": "password"},
@@ -21,11 +23,6 @@ class TwilioBackend(AWXBaseEmailBackend):
    recipient_parameter = "to_numbers"
    sender_parameter = "from_number"

    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    default_messages = {"started": {"message": DEFAULT_SUBJECT},
                        "success": {"message": DEFAULT_SUBJECT},
                        "error": {"message": DEFAULT_SUBJECT}}

    def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
        super(TwilioBackend, self).__init__(fail_silently=fail_silently)
        self.account_sid = account_sid

@@ -7,13 +7,15 @@ import requests

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.utils import get_awx_version
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.webhook_backend')


class WebhookBackend(AWXBaseEmailBackend):
class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):

    init_parameters = {"url": {"label": "Target URL", "type": "string"},
                       "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@@ -24,10 +26,16 @@ class WebhookBackend(AWXBaseEmailBackend):
    recipient_parameter = "url"
    sender_parameter = None

    DEFAULT_BODY = "{{ job_summary_dict }}"
    DEFAULT_BODY = "{{ job_metadata }}"
    default_messages = {"started": {"body": DEFAULT_BODY},
                        "success": {"body": DEFAULT_BODY},
                        "error": {"body": DEFAULT_BODY}}
                        "error": {"body": DEFAULT_BODY},
                        "workflow_approval": {
                            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. '
                                                'This node can be viewed at: {{ workflow_url }}"}'},
                            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
                            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
                            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'}}}

    def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
        self.http_method = http_method
@@ -38,15 +46,13 @@ class WebhookBackend(AWXBaseEmailBackend):
        super(WebhookBackend, self).__init__(fail_silently=fail_silently)

    def format_body(self, body):
        # If `body` has a body field, attempt to use this as the main body,
        # otherwise, leave it as a sub-field
        if isinstance(body, dict) and 'body' in body and isinstance(body['body'], str):
            try:
                potential_body = json.loads(body['body'])
                if isinstance(potential_body, dict):
                    body = potential_body
            except json.JSONDecodeError:
                pass
        # expect body to be a string representing a dict
        try:
            potential_body = json.loads(body)
            if isinstance(potential_body, dict):
                body = potential_body
        except json.JSONDecodeError:
            body = {}
        return body

    def send_messages(self, messages):

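A short sketch (not part of the diff) of the new format_body contract above: the rendered body is expected to be a string that parses to a dict, and unparseable input collapses to an empty dict.

import json

def format_webhook_body(body):
    try:
        potential_body = json.loads(body)
        if isinstance(potential_body, dict):
            body = potential_body
    except json.JSONDecodeError:
        body = {}
    return body

assert format_webhook_body('{"body": "node approved"}') == {'body': 'node approved'}
assert format_webhook_body('not json at all') == {}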
@@ -12,10 +12,12 @@ class UriCleaner(object):

    @staticmethod
    def remove_sensitive(cleartext):
        # exclude_list contains the items that will _not_ be redacted
        exclude_list = [settings.PUBLIC_GALAXY_SERVER['url']]
        if settings.PRIMARY_GALAXY_URL:
            exclude_list = [settings.PRIMARY_GALAXY_URL] + [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
        else:
            exclude_list = [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
            exclude_list += [settings.PRIMARY_GALAXY_URL]
        if settings.FALLBACK_GALAXY_SERVERS:
            exclude_list += [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
        redactedtext = cleartext
        text_index = 0
        while True:

@@ -15,7 +15,6 @@ class DependencyGraph(object):
    INVENTORY_UPDATES = 'inventory_updates'

    JOB_TEMPLATE_JOBS = 'job_template_jobs'
    JOB_PROJECT_IDS = 'job_project_ids'
    JOB_INVENTORY_IDS = 'job_inventory_ids'

    SYSTEM_JOB = 'system_job'
@@ -41,8 +40,6 @@ class DependencyGraph(object):
        Track runnable job related project and inventory to ensure updates
        don't run while a job needing those resources is running.
        '''
        # project_id -> True / False
        self.data[self.JOB_PROJECT_IDS] = {}
        # inventory_id -> True / False
        self.data[self.JOB_INVENTORY_IDS] = {}

@@ -66,7 +63,7 @@ class DependencyGraph(object):

    def get_now(self):
        return tz_now()

    def mark_system_job(self):
        self.data[self.SYSTEM_JOB] = False

@@ -81,15 +78,13 @@ class DependencyGraph(object):

    def mark_job_template_job(self, job):
        self.data[self.JOB_INVENTORY_IDS][job.inventory_id] = False
        self.data[self.JOB_PROJECT_IDS][job.project_id] = False
        self.data[self.JOB_TEMPLATE_JOBS][job.job_template_id] = False

    def mark_workflow_job(self, job):
        self.data[self.WORKFLOW_JOB_TEMPLATES_JOBS][job.workflow_job_template_id] = False

    def can_project_update_run(self, job):
        return self.data[self.JOB_PROJECT_IDS].get(job.project_id, True) and \
            self.data[self.PROJECT_UPDATES].get(job.project_id, True)
        return self.data[self.PROJECT_UPDATES].get(job.project_id, True)

    def can_inventory_update_run(self, job):
        return self.data[self.JOB_INVENTORY_IDS].get(job.inventory_source.inventory_id, True) and \

@@ -1,9 +1,5 @@
import collections
import os
import stat
import time
import yaml
import tempfile
import logging
from base64 import b64encode

@@ -88,8 +84,17 @@ class PodManager(object):

    @cached_property
    def kube_api(self):
        my_client = config.new_client_from_config(config_file=self.kube_config)
        return client.CoreV1Api(api_client=my_client)
        # this feels a little janky, but it's what k8s' own code does
        # internally when it reads kube config files from disk:
        # https://github.com/kubernetes-client/python-base/blob/0b208334ef0247aad9afcaae8003954423b61a0d/config/kube_config.py#L643
        loader = config.kube_config.KubeConfigLoader(
            config_dict=self.kube_config
        )
        cfg = type.__call__(client.Configuration)
        loader.load_and_set(cfg)
        return client.CoreV1Api(api_client=client.ApiClient(
            configuration=cfg
        ))

    @property
    def pod_name(self):

@@ -168,16 +173,10 @@ def generate_tmp_kube_config(credential, namespace):
        "current-context": host_input
    }

    if credential.get_input('verify_ssl'):
    if credential.get_input('verify_ssl') and 'ssl_ca_cert' in credential.inputs:
        config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(
            credential.get_input('ssl_ca_cert').encode()  # encode to bytes
        ).decode()  # decode the base64 data into a str
    else:
        config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True

    fd, path = tempfile.mkstemp(prefix='kubeconfig')
    with open(path, 'wb') as temp:
        temp.write(yaml.dump(config).encode())
        temp.flush()
        os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    return path
    return config

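A tiny sketch (not part of the diff) of the certificate-authority-data handling above: the PEM text is base64-encoded into the kube config dict, mirroring what kubectl stores. The certificate string below is a placeholder.

from base64 import b64encode, b64decode

ca_pem = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n"  # placeholder PEM
encoded = b64encode(ca_pem.encode()).decode()   # str suitable for the kube config dict
assert b64decode(encoded).decode() == ca_pem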
@@ -252,19 +252,30 @@ class TaskManager():
            logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
                task.log_format, task.execution_node, controller_node))
        elif rampart_group.is_containerized:
            # find one real, non-containerized instance with capacity to
            # act as the controller for k8s API interaction
            match = None
            for group in InstanceGroup.objects.all():
                if group.is_containerized or group.controller_id:
                    continue
                match = group.fit_task_to_most_remaining_capacity_instance(task)
                if match:
                    break
            task.instance_group = rampart_group
            if not task.supports_isolation():
                # project updates and inventory updates don't *actually* run in pods,
                # so just pick *any* non-isolated, non-containerized host and use it
                for group in InstanceGroup.objects.all():
                    if group.is_containerized or group.controller_id:
                        continue
                    match = group.find_largest_idle_instance()
                    if match:
                        task.execution_node = match.hostname
                        logger.debug('Submitting containerized {} to queue {}.'.format(
                            task.log_format, task.execution_node))
                        break
            if match is None:
                logger.warn(
                    'No available capacity to run containerized <{}>.'.format(task.log_format)
                )
            else:
                if task.supports_isolation():
                    task.controller_node = match.hostname
                else:
                    # project updates and inventory updates don't *actually* run in pods,
                    # so just pick *any* non-isolated, non-containerized host and use it
                    # as the execution node
                    task.execution_node = match.hostname
                    logger.debug('Submitting containerized {} to queue {}.'.format(
                        task.log_format, task.execution_node))
        else:
            task.instance_group = rampart_group
            if instance is not None:

@@ -5,11 +5,15 @@ import logging
# AWX
from awx.main.scheduler import TaskManager
from awx.main.dispatch.publish import task
from awx.main.utils.db import migration_in_progress_check_or_relase

logger = logging.getLogger('awx.main.scheduler')


@task()
def run_task_manager():
    if migration_in_progress_check_or_relase():
        logger.debug("Not running task manager because migration is in progress.")
        return
    logger.debug("Running Tower task manager.")
    TaskManager().schedule()

@@ -22,10 +22,6 @@ import yaml
import fcntl
from pathlib import Path
from uuid import uuid4
try:
    import psutil
except Exception:
    psutil = None
import urllib.parse as urlparse

# Django
@@ -34,7 +30,6 @@ from django.db import transaction, DatabaseError, IntegrityError
from django.db.models.fields.related import ForeignKey
from django.utils.timezone import now, timedelta
from django.utils.encoding import smart_str
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.cache import cache
@@ -72,12 +67,11 @@ from awx.main.isolated import manager as isolated_manager
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.utils import (get_ssh_version, update_scm_url,
                            get_licenser,
                            ignore_inventory_computed_fields,
                            ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
                            get_awx_version)
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices
from awx.main.utils.common import _get_ansible_version, get_custom_venv_choices
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services
from awx.main.utils.pglock import advisory_lock
@@ -92,7 +86,7 @@ from rest_framework.exceptions import PermissionDenied
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
           'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies',
           'update_inventory_computed_fields', 'update_host_smart_inventory_memberships',
           'send_notifications', 'run_administrative_checks', 'purge_old_stdout_files']
           'send_notifications', 'purge_old_stdout_files']

HIDDEN_PASSWORD = '**********'

@@ -269,6 +263,12 @@ def apply_cluster_membership_policies():
    logger.debug('Cluster policy computation finished in {} seconds'.format(time.time() - started_compute))


@task(queue='tower_broadcast_all', exchange_type='fanout')
def set_migration_flag():
    logger.debug('Received migration-in-progress signal, will serve redirect.')
    cache.set('migration_in_progress', True)


@task(queue='tower_broadcast_all', exchange_type='fanout')
def handle_setting_changes(setting_keys):
    orig_len = len(setting_keys)
@@ -356,28 +356,6 @@ def gather_analytics():
        os.remove(tgz)


@task()
def run_administrative_checks():
    logger.warn("Running administrative checks.")
    if not settings.TOWER_ADMIN_ALERTS:
        return
    validation_info = get_licenser().validate()
    if validation_info['license_type'] != 'open' and validation_info.get('instance_count', 0) < 1:
        return
    used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100))
    tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
    if (used_percentage * 100) > 90:
        send_mail("Ansible Tower host usage over 90%",
                  _("Ansible Tower host usage over 90%"),
                  tower_admin_emails,
                  fail_silently=True)
    if validation_info.get('date_warning', False):
        send_mail("Ansible Tower license will expire soon",
                  _("Ansible Tower license will expire soon"),
                  tower_admin_emails,
                  fail_silently=True)


@task(queue=get_local_queuename)
def purge_old_stdout_files():
    nowtime = time.time()
@@ -1364,7 +1342,7 @@ class BaseTask(object):

        ansible_runner.utils.dump_artifacts(params)
        isolated_manager_instance = isolated_manager.IsolatedManager(
            cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag,
            canceled_callback=lambda: self.update_model(self.instance.pk).cancel_flag,
            check_callback=self.check_handler,
            pod_manager=pod_manager
        )
@@ -1423,7 +1401,6 @@ class BaseTask(object):
    def deploy_container_group_pod(self, task):
        from awx.main.scheduler.kubernetes import PodManager  # Avoid circular import
        pod_manager = PodManager(self.instance)
        self.cleanup_paths.append(pod_manager.kube_config)
        try:
            log_name = task.log_format
            logger.debug(f"Launching pod for {log_name}.")
@@ -1452,7 +1429,7 @@ class BaseTask(object):
        self.update_model(task.pk, execution_node=pod_manager.pod_name)
        return pod_manager

@@ -1763,14 +1740,16 @@ class RunJob(BaseTask):
|
||||
|
||||
project_path = job.project.get_project_path(check_if_exists=False)
|
||||
job_revision = job.project.scm_revision
|
||||
needs_sync = True
|
||||
sync_needs = []
|
||||
all_sync_needs = ['update_{}'.format(job.project.scm_type), 'install_roles', 'install_collections']
|
||||
if not job.project.scm_type:
|
||||
# manual projects are not synced, user has responsibility for that
|
||||
needs_sync = False
|
||||
pass # manual projects are not synced, user has responsibility for that
|
||||
elif not os.path.exists(project_path):
|
||||
logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
|
||||
sync_needs = all_sync_needs
|
||||
elif not job.project.scm_revision:
|
||||
logger.debug('Revision not known for {}, will sync with remote'.format(job.project))
|
||||
sync_needs = all_sync_needs
|
||||
elif job.project.scm_type == 'git':
|
||||
git_repo = git.Repo(project_path)
|
||||
try:
|
||||
@@ -1781,23 +1760,27 @@ class RunJob(BaseTask):
|
||||
if desired_revision == current_revision:
job_revision = desired_revision
logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
needs_sync = False
else:
sync_needs = all_sync_needs
except (ValueError, BadGitName):
logger.debug('Needed commit for {} not in local source tree, will sync with remote'.format(job.log_format))
sync_needs = all_sync_needs
else:
sync_needs = all_sync_needs
# Galaxy requirements are not supported for manual projects
if not needs_sync and job.project.scm_type:
if not sync_needs and job.project.scm_type:
# see if we need a sync because of presence of roles
galaxy_req_path = os.path.join(project_path, 'roles', 'requirements.yml')
if os.path.exists(galaxy_req_path):
logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format))
needs_sync = True
sync_needs.append('install_roles')

galaxy_collections_req_path = os.path.join(project_path, 'collections', 'requirements.yml')
if os.path.exists(galaxy_collections_req_path):
logger.debug('Running project sync for {} because of galaxy collections requirements.'.format(job.log_format))
needs_sync = True
sync_needs.append('install_collections')

if needs_sync:
if sync_needs:
pu_ig = job.instance_group
pu_en = job.execution_node
if job.is_isolated() is True:
@@ -1807,6 +1790,7 @@ class RunJob(BaseTask):
sync_metafields = dict(
launch_type="sync",
job_type='run',
job_tags=','.join(sync_needs),
status='running',
instance_group = pu_ig,
execution_node=pu_en,
@@ -1814,6 +1798,8 @@ class RunJob(BaseTask):
)
if job.scm_branch and job.scm_branch != job.project.scm_branch:
sync_metafields['scm_branch'] = job.scm_branch
if 'update_' not in sync_metafields['job_tags']:
sync_metafields['scm_revision'] = job_revision
local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields)
# save the associated job before calling run() so that a
# cancel() call on the job can cancel the project update
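The hunk above replaces the single needs_sync boolean with a sync_needs list of job tags, so the spawned project update only runs the steps that are actually required. A minimal standalone sketch of that pattern (the function name, arguments and paths are illustrative, not AWX internals):

import os

def compute_sync_needs(scm_type, project_path, commit_locally_available):
    # Track *which* sync steps are needed instead of a yes/no flag.
    all_sync_needs = ['update_{}'.format(scm_type), 'install_roles', 'install_collections']
    if not scm_type:
        return []  # manual projects are never synced
    if not os.path.exists(project_path) or not commit_locally_available:
        return all_sync_needs  # full SCM update plus Galaxy installs
    sync_needs = []
    if os.path.exists(os.path.join(project_path, 'roles', 'requirements.yml')):
        sync_needs.append('install_roles')
    if os.path.exists(os.path.join(project_path, 'collections', 'requirements.yml')):
        sync_needs.append('install_collections')
    return sync_needs

# ','.join(compute_sync_needs('git', '/tmp/proj', True)) becomes the job_tags of
# the spawned project update; an empty list means no sync is launched at all.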
@@ -1958,10 +1944,17 @@ class RunProjectUpdate(BaseTask):
env['TMP'] = settings.AWX_PROOT_BASE_PATH
env['PROJECT_UPDATE_ID'] = str(project_update.pk)
env['ANSIBLE_CALLBACK_PLUGINS'] = self.get_path_to('..', 'plugins', 'callback')
env['ANSIBLE_GALAXY_IGNORE'] = True
# Set up the fallback server, which is the normal Ansible Galaxy by default
galaxy_servers = list(settings.FALLBACK_GALAXY_SERVERS)
# If private galaxy URL is non-blank, that means this feature is enabled
if settings.GALAXY_IGNORE_CERTS:
env['ANSIBLE_GALAXY_IGNORE'] = True
# Set up the public Galaxy server, if enabled
if settings.PUBLIC_GALAXY_ENABLED:
galaxy_servers = [settings.PUBLIC_GALAXY_SERVER]
else:
galaxy_servers = []
# Set up fallback Galaxy servers, if configured
if settings.FALLBACK_GALAXY_SERVERS:
galaxy_servers = settings.FALLBACK_GALAXY_SERVERS + galaxy_servers
# Set up the primary Galaxy server, if configured
if settings.PRIMARY_GALAXY_URL:
galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
for key in GALAXY_SERVER_FIELDS:
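With the change above, Galaxy server precedence becomes: primary server (if configured), then any fallback servers, then the public Galaxy (only when enabled). A rough standalone sketch of that ordering, assuming the same setting semantics as the hunk; the helper itself is illustrative:

def build_galaxy_servers(primary_url=None, fallback_servers=None, public_enabled=True):
    # Highest-precedence server ends up first in the list ansible-galaxy consults.
    servers = [{'id': 'galaxy', 'url': 'https://galaxy.ansible.com'}] if public_enabled else []
    if fallback_servers:
        servers = list(fallback_servers) + servers
    if primary_url:
        servers = [{'id': 'primary_galaxy', 'url': primary_url}] + servers
    return servers

# build_galaxy_servers(primary_url='https://hub.example.com/api/galaxy/')
# -> primary_galaxy first, then fallbacks, then the public Galaxy entry.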
@@ -2031,8 +2024,8 @@ class RunProjectUpdate(BaseTask):
args = []
if getattr(settings, 'PROJECT_UPDATE_VVV', False):
args.append('-vvv')
else:
args.append('-v')
if project_update.job_tags:
args.extend(['-t', project_update.job_tags])
return args

def build_extra_vars_file(self, project_update, private_data_dir):
@@ -2046,28 +2039,16 @@ class RunProjectUpdate(BaseTask):
|
||||
scm_branch = project_update.project.scm_revision
|
||||
elif not scm_branch:
|
||||
scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
|
||||
if project_update.job_type == 'check':
|
||||
roles_enabled = False
|
||||
collections_enabled = False
|
||||
else:
|
||||
roles_enabled = getattr(settings, 'AWX_ROLES_ENABLED', True)
|
||||
collections_enabled = getattr(settings, 'AWX_COLLECTIONS_ENABLED', True)
|
||||
# collections were introduced in Ansible version 2.8
|
||||
if Version(get_ansible_version()) <= Version('2.8'):
|
||||
collections_enabled = False
|
||||
extra_vars.update({
|
||||
'project_path': project_update.get_project_path(check_if_exists=False),
|
||||
'insights_url': settings.INSIGHTS_URL_BASE,
|
||||
'awx_license_type': get_license(show_key=False).get('license_type', 'UNLICENSED'),
|
||||
'awx_version': get_awx_version(),
|
||||
'scm_type': project_update.scm_type,
|
||||
'scm_url': scm_url,
|
||||
'scm_branch': scm_branch,
|
||||
'scm_clean': project_update.scm_clean,
|
||||
'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,
|
||||
'scm_full_checkout': True if project_update.job_type == 'run' else False,
|
||||
'roles_enabled': roles_enabled,
|
||||
'collections_enabled': collections_enabled,
|
||||
'roles_enabled': settings.AWX_ROLES_ENABLED,
|
||||
'collections_enabled': settings.AWX_COLLECTIONS_ENABLED,
|
||||
})
|
||||
if project_update.job_type != 'check' and self.job_private_data_dir:
|
||||
extra_vars['collections_destination'] = os.path.join(self.job_private_data_dir, 'requirements_collections')
|
||||
@@ -2179,7 +2160,7 @@ class RunProjectUpdate(BaseTask):
|
||||
try:
|
||||
instance.refresh_from_db(fields=['cancel_flag'])
|
||||
if instance.cancel_flag:
|
||||
logger.debug("ProjectUpdate({0}) was cancelled".format(instance.pk))
|
||||
logger.debug("ProjectUpdate({0}) was canceled".format(instance.pk))
|
||||
return
|
||||
fcntl.lockf(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
break
|
||||
@@ -2208,7 +2189,10 @@ class RunProjectUpdate(BaseTask):
project_path = instance.project.get_project_path(check_if_exists=False)
if os.path.exists(project_path):
git_repo = git.Repo(project_path)
self.original_branch = git_repo.active_branch
if git_repo.head.is_detached:
self.original_branch = git_repo.head.commit
else:
self.original_branch = git_repo.active_branch

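The hunk above covers repositories left in a detached-HEAD state: it records the bare commit rather than assuming an active branch exists. A small GitPython sketch of the same save/restore idea (the repository path and the restore step are illustrative):

import git

repo = git.Repo('/tmp/example-project')  # assumed existing checkout
if repo.head.is_detached:
    original = repo.head.commit      # remember the exact commit
else:
    original = repo.active_branch    # remember the branch reference

# ... sync work that may check out another branch or revision ...

repo.git.checkout(str(original))     # restore whatever was checked out before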
@staticmethod
|
||||
def make_local_copy(project_path, destination_folder, scm_type, scm_revision):
|
||||
@@ -2240,26 +2224,29 @@ class RunProjectUpdate(BaseTask):
|
||||
copy_tree(project_path, destination_folder)
|
||||
|
||||
def post_run_hook(self, instance, status):
|
||||
if self.job_private_data_dir:
|
||||
# copy project folder before resetting to default branch
|
||||
# because some git-tree-specific resources (like submodules) might matter
|
||||
self.make_local_copy(
|
||||
instance.get_project_path(check_if_exists=False), os.path.join(self.job_private_data_dir, 'project'),
|
||||
instance.scm_type, self.playbook_new_revision
|
||||
)
|
||||
if self.original_branch:
|
||||
# for git project syncs, non-default branches can be problems
|
||||
# restore to branch the repo was on before this run
|
||||
try:
|
||||
self.original_branch.checkout()
|
||||
except Exception:
|
||||
# this could have failed due to dirty tree, but difficult to predict all cases
|
||||
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
|
||||
self.release_lock(instance)
|
||||
# To avoid hangs, very important to release lock even if errors happen here
|
||||
try:
|
||||
if self.playbook_new_revision:
|
||||
instance.scm_revision = self.playbook_new_revision
|
||||
instance.save(update_fields=['scm_revision'])
|
||||
if self.job_private_data_dir:
|
||||
# copy project folder before resetting to default branch
|
||||
# because some git-tree-specific resources (like submodules) might matter
|
||||
self.make_local_copy(
|
||||
instance.get_project_path(check_if_exists=False), os.path.join(self.job_private_data_dir, 'project'),
|
||||
instance.scm_type, instance.scm_revision
|
||||
)
|
||||
if self.original_branch:
|
||||
# for git project syncs, non-default branches can be problems
|
||||
# restore to branch the repo was on before this run
|
||||
try:
|
||||
self.original_branch.checkout()
|
||||
except Exception:
|
||||
# this could have failed due to dirty tree, but difficult to predict all cases
|
||||
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
|
||||
finally:
|
||||
self.release_lock(instance)
|
||||
p = instance.project
|
||||
if self.playbook_new_revision:
|
||||
instance.scm_revision = self.playbook_new_revision
|
||||
instance.save(update_fields=['scm_revision'])
|
||||
if instance.job_type == 'check' and status not in ('failed', 'canceled',):
|
||||
if self.playbook_new_revision:
|
||||
p.scm_revision = self.playbook_new_revision
|
||||
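The post_run_hook rework above wraps the revision bookkeeping, project copy and branch restore in a try block and releases the project lock in finally, so the lock cannot be leaked when any of those steps raise. A minimal sketch of that shape (lock path and work callback are placeholders):

import fcntl

def run_with_lock(lock_path, work):
    fd = open(lock_path, 'w')
    fcntl.lockf(fd, fcntl.LOCK_EX)
    try:
        work()  # revision save, local copy, branch restore, ...
    finally:
        fcntl.lockf(fd, fcntl.LOCK_UN)  # released even if work() raises
        fd.close()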
@@ -2354,6 +2341,27 @@ class RunInventoryUpdate(BaseTask):
|
||||
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
|
||||
elif inventory_update.source == 'file':
|
||||
raise NotImplementedError('Cannot update file sources through the task system.')
|
||||
|
||||
if inventory_update.source == 'scm' and inventory_update.source_project_update:
|
||||
env_key = 'ANSIBLE_COLLECTIONS_PATHS'
|
||||
config_setting = 'collections_paths'
|
||||
folder = 'requirements_collections'
|
||||
default = '~/.ansible/collections:/usr/share/ansible/collections'
|
||||
|
||||
config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), [config_setting])
|
||||
|
||||
paths = default.split(':')
|
||||
if env_key in env:
|
||||
for path in env[env_key].split(':'):
|
||||
if path not in paths:
|
||||
paths = [env[env_key]] + paths
|
||||
elif config_setting in config_values:
|
||||
for path in config_values[config_setting].split(':'):
|
||||
if path not in paths:
|
||||
paths = [config_values[config_setting]] + paths
|
||||
paths = [os.path.join(private_data_dir, folder)] + paths
|
||||
env[env_key] = os.pathsep.join(paths)
|
||||
|
||||
return env
|
||||
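The inventory-update hunk above prepends the private requirements_collections directory to ANSIBLE_COLLECTIONS_PATHS while keeping user-configured and default paths. A simplified sketch of that merge (the helper name is made up):

import os

def merged_collections_paths(private_data_dir, existing=None,
                             default='~/.ansible/collections:/usr/share/ansible/collections'):
    paths = default.split(':')
    if existing:
        for p in existing.split(':'):
            if p not in paths:
                paths.insert(0, p)
    # the per-job requirements directory always wins
    return os.pathsep.join([os.path.join(private_data_dir, 'requirements_collections')] + paths)

# merged_collections_paths('/tmp/awx_123_abc', existing='/opt/site/collections')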
|
||||
def write_args_file(self, private_data_dir, args):
|
||||
@@ -2452,7 +2460,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
# Use the vendored script path
|
||||
inventory_path = self.get_path_to('..', 'plugins', 'inventory', injector.script_name)
|
||||
elif src == 'scm':
|
||||
inventory_path = inventory_update.get_actual_source_path()
|
||||
inventory_path = os.path.join(private_data_dir, 'project', inventory_update.source_path)
|
||||
elif src == 'custom':
|
||||
handle, inventory_path = tempfile.mkstemp(dir=private_data_dir)
|
||||
f = os.fdopen(handle, 'w')
|
||||
@@ -2473,7 +2481,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
'''
|
||||
src = inventory_update.source
|
||||
if src == 'scm' and inventory_update.source_project_update:
|
||||
return inventory_update.source_project_update.get_project_path(check_if_exists=False)
|
||||
return os.path.join(private_data_dir, 'project')
|
||||
if src in CLOUD_PROVIDERS:
|
||||
injector = None
|
||||
if src in InventorySource.injectors:
|
||||
@@ -2499,6 +2507,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
_eager_fields=dict(
|
||||
launch_type="sync",
|
||||
job_type='run',
|
||||
job_tags='update_{},install_collections'.format(source_project.scm_type), # roles are never valid for inventory
|
||||
status='running',
|
||||
execution_node=inventory_update.execution_node,
|
||||
instance_group = inventory_update.instance_group,
|
||||
@@ -2509,8 +2518,10 @@ class RunInventoryUpdate(BaseTask):
|
||||
|
||||
project_update_task = local_project_sync._get_task_class()
|
||||
try:
|
||||
project_update_task().run(local_project_sync.id)
|
||||
inventory_update.inventory_source.scm_last_revision = local_project_sync.project.scm_revision
|
||||
sync_task = project_update_task(job_private_data_dir=private_data_dir)
|
||||
sync_task.run(local_project_sync.id)
|
||||
local_project_sync.refresh_from_db()
|
||||
inventory_update.inventory_source.scm_last_revision = local_project_sync.scm_revision
|
||||
inventory_update.inventory_source.save(update_fields=['scm_last_revision'])
|
||||
except Exception:
|
||||
inventory_update = self.update_model(
|
||||
@@ -2518,6 +2529,13 @@ class RunInventoryUpdate(BaseTask):
|
||||
job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
|
||||
('project_update', local_project_sync.name, local_project_sync.id)))
|
||||
raise
|
||||
elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project:
|
||||
# This follows update, not sync, so make copy here
|
||||
project_path = source_project.get_project_path(check_if_exists=False)
|
||||
RunProjectUpdate.make_local_copy(
|
||||
project_path, os.path.join(private_data_dir, 'project'),
|
||||
source_project.scm_type, source_project.scm_revision
|
||||
)
|
||||
|
||||
|
||||
@task()
|
||||
@@ -2725,10 +2743,11 @@ class RunSystemJob(BaseTask):
|
||||
json_vars = {}
|
||||
else:
|
||||
json_vars = json.loads(system_job.extra_vars)
|
||||
if 'days' in json_vars:
|
||||
args.extend(['--days', str(json_vars.get('days', 60))])
|
||||
if 'dry_run' in json_vars and json_vars['dry_run']:
|
||||
args.extend(['--dry-run'])
|
||||
if system_job.job_type in ('cleanup_jobs', 'cleanup_activitystream'):
|
||||
if 'days' in json_vars:
|
||||
args.extend(['--days', str(json_vars.get('days', 60))])
|
||||
if 'dry_run' in json_vars and json_vars['dry_run']:
|
||||
args.extend(['--dry-run'])
|
||||
if system_job.job_type == 'cleanup_jobs':
|
||||
args.extend(['--jobs', '--project-updates', '--inventory-updates',
|
||||
'--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
- name: Hello World Sample
|
||||
hosts: all
|
||||
tasks:
|
||||
- name: Hello Message
|
||||
debug:
|
||||
msg: "Hello World!"
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
- name: Hello World Sample
|
||||
hosts: all
|
||||
tasks:
|
||||
- name: Hello Message
|
||||
debug:
|
||||
msg: "Hello World!"
|
||||
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
---
|
||||
- hosts: all
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
---
|
||||
- import_playbook: foo
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
---
|
||||
- include: foo
|
||||
|
||||
@@ -3,6 +3,10 @@ conditional_groups:
|
||||
default_host_filters: []
|
||||
exclude_host_filters:
|
||||
- resource_group not in ['foo_resources', 'bar_resources']
|
||||
- '"Creator" not in tags.keys()'
|
||||
- tags["Creator"] != "jmarshall"
|
||||
- '"peanutbutter" not in tags.keys()'
|
||||
- tags["peanutbutter"] != "jelly"
|
||||
- location not in ['southcentralus', 'westus']
|
||||
fail_on_template_errors: false
|
||||
hostvar_expressions:
|
||||
|
||||
@@ -7,4 +7,5 @@ locations = southcentralus,westus
|
||||
base_source_var = value_of_var
|
||||
use_private_ip = True
|
||||
resource_groups = foo_resources,bar_resources
|
||||
tags = Creator:jmarshall, peanutbutter:jelly
|
||||
|
||||
|
||||
@@ -25,6 +25,8 @@ EXPECTED_VALUES = {
|
||||
'awx_custom_virtualenvs_total':0.0,
|
||||
'awx_running_jobs_total':0.0,
|
||||
'awx_instance_capacity':100.0,
|
||||
'awx_instance_consumed_capacity':0.0,
|
||||
'awx_instance_remaining_capacity':100.0,
|
||||
'awx_instance_cpu':0.0,
|
||||
'awx_instance_memory':0.0,
|
||||
'awx_instance_info':1.0,
|
||||
|
||||
@@ -1439,3 +1439,15 @@ def test_create_credential_with_invalid_url_xfail(post, organization, admin, url
|
||||
assert response.status_code == status
|
||||
if status != 201:
|
||||
assert response.data['inputs']['server_url'] == [msg]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_external_credential_rbac_test_endpoint(post, alice, external_credential):
|
||||
url = reverse('api:credential_external_test', kwargs={'pk': external_credential.pk})
|
||||
data = {'metadata': {'key': 'some_key'}}
|
||||
|
||||
external_credential.read_role.members.add(alice)
|
||||
assert post(url, data, alice).status_code == 403
|
||||
|
||||
external_credential.use_role.members.add(alice)
|
||||
assert post(url, data, alice).status_code == 202
|
||||
|
||||
@@ -85,14 +85,35 @@ def test_update_credential_type_in_use_xfail(patch, delete, admin):
|
||||
Credential(credential_type=_type, name='My Custom Cred').save()
|
||||
|
||||
url = reverse('api:credential_type_detail', kwargs={'pk': _type.pk})
|
||||
response = patch(url, {'name': 'Some Other Name'}, admin)
|
||||
assert response.status_code == 200
|
||||
patch(url, {'name': 'Some Other Name'}, admin, expect=200)
|
||||
|
||||
url = reverse('api:credential_type_detail', kwargs={'pk': _type.pk})
|
||||
response = patch(url, {'inputs': {}}, admin)
|
||||
assert response.status_code == 403
|
||||
response = patch(url, {'inputs': {}}, admin, expect=403)
|
||||
assert response.data['detail'] == 'Modifications to inputs are not allowed for credential types that are in use'
|
||||
|
||||
assert delete(url, admin).status_code == 403
|
||||
response = delete(url, admin, expect=403)
|
||||
assert response.data['detail'] == 'Credential types that are in use cannot be deleted'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_credential_type_unvalidated_inputs(post, patch, admin):
|
||||
simple_inputs = {'fields': [
|
||||
{'id': 'api_token', 'label': 'fooo'}
|
||||
]}
|
||||
response = post(
|
||||
url=reverse('api:credential_type_list'),
|
||||
data={'name': 'foo', 'kind': 'cloud', 'inputs': simple_inputs},
|
||||
user=admin,
|
||||
expect=201
|
||||
)
|
||||
# validation adds the type field to the input
|
||||
_type = CredentialType.objects.get(pk=response.data['id'])
|
||||
Credential(credential_type=_type, name='My Custom Cred').save()
|
||||
|
||||
# should not raise an error because we should only compare
|
||||
# post-validation values to other post-validation values
|
||||
url = reverse('api:credential_type_detail', kwargs={'pk': _type.id})
|
||||
patch(url, {'inputs': simple_inputs}, admin, expect=200)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -460,3 +481,12 @@ def test_create_with_undefined_template_variable_xfail(post, admin):
|
||||
}, admin)
|
||||
assert response.status_code == 400
|
||||
assert "'api_tolkien' is undefined" in json.dumps(response.data)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_credential_type_rbac_external_test(post, alice, admin, credentialtype_external):
|
||||
# only admins may use the credential type test endpoint
|
||||
url = reverse('api:credential_type_external_test', kwargs={'pk': credentialtype_external.pk})
|
||||
data = {'inputs': {}, 'metadata': {}}
|
||||
assert post(url, data, admin).status_code == 202
|
||||
assert post(url, data, alice).status_code == 403
|
||||
|
||||
awx/main/tests/functional/api/test_events.py (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models import AdHocCommand, AdHocCommandEvent, JobEvent
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('truncate, expected', [
|
||||
(True, False),
|
||||
(False, True),
|
||||
])
|
||||
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
|
||||
objs = organization_factory("org", superusers=['admin'])
|
||||
jt = job_template_factory("jt", organization=objs.organization,
|
||||
inventory='test_inv', project='test_proj').job_template
|
||||
job = jt.create_unified_job()
|
||||
JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start',
|
||||
stdout='a' * 1025)
|
||||
|
||||
url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
|
||||
if not truncate:
|
||||
url += '?no_truncate=1'
|
||||
|
||||
response = get(url, user=objs.superusers.admin, expect=200)
|
||||
assert (len(response.data['results'][0]['stdout']) == 1025) == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('truncate, expected', [
|
||||
(True, False),
|
||||
(False, True),
|
||||
])
|
||||
def test_ad_hoc_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
|
||||
objs = organization_factory("org", superusers=['admin'])
|
||||
adhoc = AdHocCommand()
|
||||
adhoc.save()
|
||||
AdHocCommandEvent.create_from_data(ad_hoc_command_id=adhoc.pk, uuid='abc123', event='runner_on_start',
|
||||
stdout='a' * 1025)
|
||||
|
||||
url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': adhoc.pk})
|
||||
if not truncate:
|
||||
url += '?no_truncate=1'
|
||||
|
||||
response = get(url, user=objs.superusers.admin, expect=200)
|
||||
assert (len(response.data['results'][0]['stdout']) == 1025) == expected
|
||||
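What the two tests above exercise, from an API consumer's point of view: event sublists truncate long stdout unless no_truncate=1 is passed. Illustrative request only; host, job id and credentials are placeholders:

import requests

base = 'https://awx.example.com/api/v2/jobs/42/job_events/'
truncated = requests.get(base, auth=('admin', 'password')).json()
full = requests.get(base, params={'no_truncate': 1}, auth=('admin', 'password')).json()
# 'truncated' caps each event's stdout; 'full' returns it unmodified.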
@@ -117,3 +117,10 @@ def test_handle_content_type(post, admin):
|
||||
admin,
|
||||
content_type='text/html',
|
||||
expect=415)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_not_found(get, admin_user):
|
||||
root_url = reverse('api:api_v2_root_view')
|
||||
r = get(root_url + 'fooooooo', user=admin_user, expect=404)
|
||||
assert r.data.get('detail') == 'The requested resource could not be found.'
|
||||
|
||||
@@ -45,6 +45,14 @@ def isolated_instance_group(instance_group, instance):
|
||||
return ig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def containerized_instance_group(instance_group, kube_credential):
|
||||
ig = InstanceGroup(name="container")
|
||||
ig.credential = kube_credential
|
||||
ig.save()
|
||||
return ig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def create_job_factory(job_factory, instance_group):
|
||||
def fn(status='running'):
|
||||
@@ -240,3 +248,47 @@ def test_instance_group_order_persistence(get, post, admin, source_model):
|
||||
resp = get(url, admin)
|
||||
assert resp.data['count'] == total
|
||||
assert [ig['name'] for ig in resp.data['results']] == [ig.name for ig in before]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_instance_group_update_fields(patch, instance, instance_group, admin, containerized_instance_group):
|
||||
# policy_instance_ variables can only be updated in instance groups that are NOT containerized
|
||||
# instance group (not containerized)
|
||||
ig_url = reverse("api:instance_group_detail", kwargs={'pk': instance_group.pk})
|
||||
assert not instance_group.is_containerized
|
||||
assert not containerized_instance_group.is_isolated
|
||||
resp = patch(ig_url, {'policy_instance_percentage':15}, admin, expect=200)
|
||||
assert 15 == resp.data['policy_instance_percentage']
|
||||
resp = patch(ig_url, {'policy_instance_minimum':15}, admin, expect=200)
|
||||
assert 15 == resp.data['policy_instance_minimum']
|
||||
resp = patch(ig_url, {'policy_instance_list':[instance.hostname]}, admin)
|
||||
assert [instance.hostname] == resp.data['policy_instance_list']
|
||||
|
||||
# containerized instance group
|
||||
cg_url = reverse("api:instance_group_detail", kwargs={'pk': containerized_instance_group.pk})
|
||||
assert containerized_instance_group.is_containerized
|
||||
assert not containerized_instance_group.is_isolated
|
||||
resp = patch(cg_url, {'policy_instance_percentage':15}, admin, expect=400)
|
||||
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_percentage']
|
||||
resp = patch(cg_url, {'policy_instance_minimum':15}, admin, expect=400)
|
||||
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_minimum']
|
||||
resp = patch(cg_url, {'policy_instance_list':[instance.hostname]}, admin)
|
||||
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_list']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_containerized_group_default_fields(instance_group, kube_credential):
|
||||
ig = InstanceGroup(name="test_policy_field_defaults")
|
||||
ig.policy_instance_list = [1]
|
||||
ig.policy_instance_minimum = 5
|
||||
ig.policy_instance_percentage = 5
|
||||
ig.save()
|
||||
assert ig.policy_instance_list == [1]
|
||||
assert ig.policy_instance_minimum == 5
|
||||
assert ig.policy_instance_percentage == 5
|
||||
ig.credential = kube_credential
|
||||
ig.save()
|
||||
assert ig.policy_instance_list == []
|
||||
assert ig.policy_instance_minimum == 0
|
||||
assert ig.policy_instance_percentage == 0
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import pytest
|
||||
import base64
|
||||
import contextlib
|
||||
import json
|
||||
from unittest import mock
|
||||
|
||||
from django.db import connection
|
||||
from django.test.utils import override_settings
|
||||
@@ -14,6 +16,18 @@ from awx.sso.models import UserEnterpriseAuth
|
||||
from oauth2_provider.models import RefreshToken
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def immediate_on_commit():
|
||||
"""
|
||||
Context manager executing transaction.on_commit() hooks immediately as
|
||||
if the connection was in auto-commit mode.
|
||||
"""
|
||||
def on_commit(func):
|
||||
func()
|
||||
with mock.patch('django.db.connection.on_commit', side_effect=on_commit) as patch:
|
||||
yield patch
|
||||
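A usage sketch for the immediate_on_commit() helper above; the test body is illustrative and not part of this changeset:

from django.db import connection

def test_on_commit_runs_immediately():
    calls = []
    with immediate_on_commit():
        connection.on_commit(lambda: calls.append('ran'))  # would normally wait for COMMIT
    assert calls == ['ran']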
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_personal_access_token_creation(oauth_application, post, alice):
|
||||
url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
|
||||
@@ -54,6 +68,41 @@ def test_token_creation_disabled_for_external_accounts(oauth_application, post,
|
||||
assert AccessToken.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_existing_token_disabled_for_external_accounts(oauth_application, get, post, admin):
|
||||
UserEnterpriseAuth(user=admin, provider='radius').save()
|
||||
url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
|
||||
with override_settings(RADIUS_SERVER='example.org', ALLOW_OAUTH2_FOR_EXTERNAL_USERS=True):
|
||||
resp = post(
|
||||
url,
|
||||
data='grant_type=password&username=admin&password=admin&scope=read',
|
||||
content_type='application/x-www-form-urlencoded',
|
||||
HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
|
||||
oauth_application.client_id, oauth_application.client_secret
|
||||
])))),
|
||||
status=201
|
||||
)
|
||||
token = json.loads(resp.content)['access_token']
|
||||
assert AccessToken.objects.count() == 1
|
||||
|
||||
with immediate_on_commit():
|
||||
resp = get(
|
||||
drf_reverse('api:user_me_list', kwargs={'version': 'v2'}),
|
||||
HTTP_AUTHORIZATION='Bearer ' + token,
|
||||
status=200
|
||||
)
|
||||
assert json.loads(resp.content)['results'][0]['username'] == 'admin'
|
||||
|
||||
with override_settings(RADIUS_SERVER='example.org', ALLOW_OAUTH2_FOR_EXTERNAL_USER=False):
|
||||
with immediate_on_commit():
|
||||
resp = get(
|
||||
drf_reverse('api:user_me_list', kwargs={'version': 'v2'}),
|
||||
HTTP_AUTHORIZATION='Bearer ' + token,
|
||||
status=401
|
||||
)
|
||||
assert b'To establish a login session' in resp.content
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_pat_creation_no_default_scope(oauth_application, post, admin):
|
||||
# tests that the default scope is overridden
|
||||
|
||||
@@ -0,0 +1,173 @@
|
||||
import json
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
from django.test.utils import override_settings
|
||||
from django.conf import settings
|
||||
import pytest
|
||||
|
||||
from awx.main import models
|
||||
from awx.conf.models import Setting
|
||||
from awx.main.management.commands import regenerate_secret_key
|
||||
from awx.main.utils.encryption import encrypt_field, decrypt_field, encrypt_value
|
||||
|
||||
|
||||
PREFIX = '$encrypted$UTF8$AESCBC$'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestKeyRegeneration:
|
||||
|
||||
def test_encrypted_ssh_password(self, credential):
|
||||
# test basic decryption
|
||||
assert credential.inputs['password'].startswith(PREFIX)
|
||||
assert credential.get_input('password') == 'secret'
|
||||
|
||||
# re-key the credential
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
new_cred = models.Credential.objects.get(pk=credential.pk)
|
||||
assert credential.inputs['password'] != new_cred.inputs['password']
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
new_cred.get_input('password')
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
assert new_cred.get_input('password') == 'secret'
|
||||
|
||||
def test_encrypted_setting_values(self):
|
||||
# test basic decryption
|
||||
settings.LOG_AGGREGATOR_PASSWORD = 'sensitive'
|
||||
s = Setting.objects.filter(key='LOG_AGGREGATOR_PASSWORD').first()
|
||||
assert s.value.startswith(PREFIX)
|
||||
assert settings.LOG_AGGREGATOR_PASSWORD == 'sensitive'
|
||||
|
||||
# re-key the setting value
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
new_setting = Setting.objects.filter(key='LOG_AGGREGATOR_PASSWORD').first()
|
||||
assert s.value != new_setting.value
|
||||
|
||||
# wipe out the local cache so the value is pulled from the DB again
|
||||
settings.cache.delete('LOG_AGGREGATOR_PASSWORD')
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
settings.LOG_AGGREGATOR_PASSWORD
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
assert settings.LOG_AGGREGATOR_PASSWORD == 'sensitive'
|
||||
|
||||
def test_encrypted_notification_secrets(self, notification_template_with_encrypt):
|
||||
# test basic decryption
|
||||
nt = notification_template_with_encrypt
|
||||
nc = nt.notification_configuration
|
||||
assert nc['token'].startswith(PREFIX)
|
||||
|
||||
Slack = nt.CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type]
|
||||
class TestBackend(Slack):
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
assert kw['token'] == 'token'
|
||||
|
||||
def send_messages(self, messages):
|
||||
pass
|
||||
|
||||
nt.CLASS_FOR_NOTIFICATION_TYPE['test'] = TestBackend
|
||||
nt.notification_type = 'test'
|
||||
nt.send('Subject', 'Body')
|
||||
|
||||
# re-key the notification config
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
new_nt = models.NotificationTemplate.objects.get(pk=nt.pk)
|
||||
assert nt.notification_configuration['token'] != new_nt.notification_configuration['token']
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
new_nt.CLASS_FOR_NOTIFICATION_TYPE['test'] = TestBackend
|
||||
new_nt.notification_type = 'test'
|
||||
new_nt.send('Subject', 'Body')
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
new_nt.send('Subject', 'Body')
|
||||
|
||||
def test_job_start_args(self, job_factory):
|
||||
# test basic decryption
|
||||
job = job_factory()
|
||||
job.start_args = json.dumps({'foo': 'bar'})
|
||||
job.start_args = encrypt_field(job, field_name='start_args')
|
||||
job.save()
|
||||
assert job.start_args.startswith(PREFIX)
|
||||
|
||||
# re-key the start_args
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
new_job = models.Job.objects.get(pk=job.pk)
|
||||
assert new_job.start_args != job.start_args
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
decrypt_field(new_job, field_name='start_args')
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
assert json.loads(
|
||||
decrypt_field(new_job, field_name='start_args')
|
||||
) == {'foo': 'bar'}
|
||||
|
||||
@pytest.mark.parametrize('cls', ('JobTemplate', 'WorkflowJobTemplate'))
|
||||
def test_survey_spec(self, inventory, project, survey_spec_factory, cls):
|
||||
params = {}
|
||||
if cls == 'JobTemplate':
|
||||
params['inventory'] = inventory
|
||||
params['project'] = project
|
||||
# test basic decryption
|
||||
jt = getattr(models, cls).objects.create(
|
||||
name='Example Template',
|
||||
survey_spec=survey_spec_factory([{
|
||||
'variable': 'secret_key',
|
||||
'default': encrypt_value('donttell', pk=None),
|
||||
'type': 'password'
|
||||
}]),
|
||||
survey_enabled=True,
|
||||
**params
|
||||
)
|
||||
job = jt.create_unified_job()
|
||||
assert jt.survey_spec['spec'][0]['default'].startswith(PREFIX)
|
||||
assert job.survey_passwords == {'secret_key': '$encrypted$'}
|
||||
assert json.loads(job.decrypted_extra_vars())['secret_key'] == 'donttell'
|
||||
|
||||
# re-key the extra_vars
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
new_job = models.UnifiedJob.objects.get(pk=job.pk)
|
||||
assert new_job.extra_vars != job.extra_vars
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
new_job.decrypted_extra_vars()
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
assert json.loads(
|
||||
new_job.decrypted_extra_vars()
|
||||
)['secret_key'] == 'donttell'
|
||||
|
||||
def test_oauth2_application_client_secret(self, oauth_application):
|
||||
# test basic decryption
|
||||
secret = oauth_application.client_secret
|
||||
assert len(secret) == 128
|
||||
|
||||
# re-key the client_secret
|
||||
new_key = regenerate_secret_key.Command().handle()
|
||||
|
||||
# verify that the old SECRET_KEY doesn't work
|
||||
with pytest.raises(InvalidToken):
|
||||
models.OAuth2Application.objects.get(
|
||||
pk=oauth_application.pk
|
||||
).client_secret
|
||||
|
||||
# verify that the new SECRET_KEY *does* work
|
||||
with override_settings(SECRET_KEY=new_key):
|
||||
assert models.OAuth2Application.objects.get(
|
||||
pk=oauth_application.pk
|
||||
).client_secret == secret
|
||||
@@ -8,6 +8,8 @@ from unittest.mock import PropertyMock
|
||||
|
||||
# Django
|
||||
from django.urls import resolve
|
||||
from django.http import Http404
|
||||
from django.core.handlers.exception import response_for_exception
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db.backends.sqlite3.base import SQLiteCursorWrapper
|
||||
@@ -120,6 +122,22 @@ def project_playbooks():
|
||||
mocked.start()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def run_computed_fields_right_away(request):
|
||||
|
||||
def run_me(inventory_id, should_update_hosts=True):
|
||||
i = Inventory.objects.get(id=inventory_id)
|
||||
i.update_computed_fields(update_hosts=should_update_hosts)
|
||||
|
||||
mocked = mock.patch(
|
||||
'awx.main.signals.update_inventory_computed_fields.delay',
|
||||
new=run_me
|
||||
)
|
||||
mocked.start()
|
||||
|
||||
request.addfinalizer(mocked.stop)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@mock.patch.object(Project, "update", lambda self, **kwargs: None)
|
||||
def project(instance, organization):
|
||||
@@ -278,14 +296,21 @@ def credentialtype_external():
|
||||
}],
|
||||
'required': ['url', 'token', 'key'],
|
||||
}
|
||||
external_type = CredentialType(
|
||||
kind='external',
|
||||
managed_by_tower=True,
|
||||
name='External Service',
|
||||
inputs=external_type_inputs
|
||||
)
|
||||
external_type.save()
|
||||
return external_type
|
||||
|
||||
class MockPlugin(object):
|
||||
def backend(self, **kwargs):
|
||||
return 'secret'
|
||||
|
||||
with mock.patch('awx.main.models.credential.CredentialType.plugin', new_callable=PropertyMock) as mock_plugin:
|
||||
mock_plugin.return_value = MockPlugin()
|
||||
external_type = CredentialType(
|
||||
kind='external',
|
||||
managed_by_tower=True,
|
||||
name='External Service',
|
||||
inputs=external_type_inputs
|
||||
)
|
||||
external_type.save()
|
||||
yield external_type
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -581,8 +606,12 @@ def _request(verb):
|
||||
if 'format' not in kwargs and 'content_type' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
|
||||
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
|
||||
request = getattr(APIRequestFactory(), verb)(url, **kwargs)
|
||||
request_error = None
|
||||
try:
|
||||
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
|
||||
except Http404 as e:
|
||||
request_error = e
|
||||
if isinstance(kwargs.get('cookies', None), dict):
|
||||
for key, value in kwargs['cookies'].items():
|
||||
request.COOKIES[key] = value
|
||||
@@ -591,7 +620,10 @@ def _request(verb):
|
||||
if user:
|
||||
force_authenticate(request, user=user)
|
||||
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if not request_error:
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
else:
|
||||
response = response_for_exception(request, request_error)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
|
||||
@@ -87,7 +87,10 @@ class TestJobNotificationMixin(object):
|
||||
'use_fact_cache': bool,
|
||||
'verbosity': int},
|
||||
'job_friendly_name': str,
|
||||
'job_summary_dict': str,
|
||||
'job_metadata': str,
|
||||
'approval_status': str,
|
||||
'approval_node_name': str,
|
||||
'workflow_url': str,
|
||||
'url': str}
|
||||
|
||||
|
||||
@@ -144,5 +147,3 @@ class TestJobNotificationMixin(object):
|
||||
|
||||
context_stub = JobNotificationMixin.context_stub()
|
||||
check_structure_and_completeness(TestJobNotificationMixin.CONTEXT_STRUCTURE, context_stub)
|
||||
|
||||
|
||||
|
||||
@@ -281,15 +281,18 @@ class TestTaskImpact:
|
||||
return job
|
||||
return r
|
||||
|
||||
def test_limit_task_impact(self, job_host_limit):
|
||||
def test_limit_task_impact(self, job_host_limit, run_computed_fields_right_away):
|
||||
job = job_host_limit(5, 2)
|
||||
job.inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
|
||||
assert job.inventory.total_hosts == 5
|
||||
assert job.task_impact == 2 + 1 # forks becomes constraint
|
||||
|
||||
def test_host_task_impact(self, job_host_limit):
|
||||
def test_host_task_impact(self, job_host_limit, run_computed_fields_right_away):
|
||||
job = job_host_limit(3, 5)
|
||||
job.inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
|
||||
assert job.task_impact == 3 + 1 # hosts becomes constraint
|
||||
|
||||
def test_shard_task_impact(self, slice_job_factory):
|
||||
def test_shard_task_impact(self, slice_job_factory, run_computed_fields_right_away):
|
||||
# factory creates one host per slice
|
||||
workflow_job = slice_job_factory(3, jt_kwargs={'forks': 50}, spawn=True)
|
||||
# arrange the jobs by their number
|
||||
@@ -308,4 +311,5 @@ class TestTaskImpact:
|
||||
len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts'])
|
||||
for i in range(3)
|
||||
] == [2, 1, 1]
|
||||
jobs[0].inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
|
||||
assert [job.task_impact for job in jobs] == [3, 2, 2]
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import subprocess
|
||||
import yaml
|
||||
import base64
|
||||
|
||||
from unittest import mock # noqa
|
||||
@@ -51,6 +50,5 @@ def test_kubectl_ssl_verification(containerized_job):
|
||||
cred.inputs['ssl_ca_cert'] = cert.stdout
|
||||
cred.save()
|
||||
pm = PodManager(containerized_job)
|
||||
config = yaml.load(open(pm.kube_config), Loader=yaml.FullLoader)
|
||||
ca_data = config['clusters'][0]['cluster']['certificate-authority-data']
|
||||
ca_data = pm.kube_config['clusters'][0]['cluster']['certificate-authority-data']
|
||||
assert cert.stdout == base64.b64decode(ca_data.encode())
|
||||
|
||||
@@ -4,6 +4,7 @@ import json
|
||||
from datetime import timedelta
|
||||
|
||||
from awx.main.scheduler import TaskManager
|
||||
from awx.main.scheduler.dependency_graph import DependencyGraph
|
||||
from awx.main.utils import encrypt_field
|
||||
from awx.main.models import WorkflowJobTemplate, JobTemplate
|
||||
|
||||
@@ -326,3 +327,29 @@ def test_shared_dependencies_launch(default_instance_group, job_template_factory
|
||||
iu = [x for x in ii.inventory_updates.all()]
|
||||
assert len(pu) == 1
|
||||
assert len(iu) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_not_blocking_project_update(default_instance_group, job_template_factory):
|
||||
objects = job_template_factory('jt', organization='org1', project='proj',
|
||||
inventory='inv', credential='cred',
|
||||
jobs=["job"])
|
||||
job = objects.jobs["job"]
|
||||
job.instance_group = default_instance_group
|
||||
job.status = "running"
|
||||
job.save()
|
||||
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
task_manager = TaskManager()
|
||||
task_manager._schedule()
|
||||
|
||||
proj = objects.project
|
||||
project_update = proj.create_project_update()
|
||||
project_update.instance_group = default_instance_group
|
||||
project_update.status = "pending"
|
||||
project_update.save()
|
||||
assert not task_manager.is_job_blocked(project_update)
|
||||
|
||||
dependency_graph = DependencyGraph(None)
|
||||
dependency_graph.add_job(job)
|
||||
assert not dependency_graph.is_job_blocked(project_update)
|
||||
|
||||
@@ -54,7 +54,8 @@ INI_TEST_VARS = {
|
||||
},
|
||||
'azure_rm': {
|
||||
'use_private_ip': True,
|
||||
'resource_groups': 'foo_resources,bar_resources'
|
||||
'resource_groups': 'foo_resources,bar_resources',
|
||||
'tags': 'Creator:jmarshall, peanutbutter:jelly'
|
||||
},
|
||||
'satellite6': {
|
||||
'satellite6_group_patterns': 'foo_group_patterns',
|
||||
@@ -264,6 +265,7 @@ def test_inventory_update_injected_content(this_kind, script_or_plugin, inventor
|
||||
assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == ('auto' if use_plugin else 'script')
|
||||
set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0'])
|
||||
env, content = read_content(private_data_dir, envvars, inventory_update)
|
||||
env.pop('ANSIBLE_COLLECTIONS_PATHS', None) # collection paths not relevant to this test
|
||||
base_dir = os.path.join(DATA, script_or_plugin)
|
||||
if not os.path.exists(base_dir):
|
||||
os.mkdir(base_dir)
|
||||
|
||||
@@ -2,52 +2,10 @@ import pytest
|
||||
from unittest import mock
|
||||
|
||||
from awx.main.migrations import _inventory_source as invsrc
|
||||
from awx.main.models import InventorySource
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_inv_src_manual_removal(inventory_source):
|
||||
inventory_source.source = ''
|
||||
inventory_source.save()
|
||||
|
||||
assert InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
invsrc.remove_manual_inventory_sources(apps, None)
|
||||
assert not InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_rax_inv_src_removal(inventory_source):
|
||||
inventory_source.source = 'rax'
|
||||
inventory_source.save()
|
||||
|
||||
assert InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
invsrc.remove_rax_inventory_sources(apps, None)
|
||||
assert not InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_inv_src_rename(inventory_source_factory):
|
||||
inv_src01 = inventory_source_factory('t1')
|
||||
|
||||
invsrc.rename_inventory_sources(apps, None)
|
||||
|
||||
inv_src01.refresh_from_db()
|
||||
# inv-is-t1 is generated in the inventory_source_factory
|
||||
assert inv_src01.name == 't1 - inv-is-t1 - 0'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_azure_inv_src_removal(inventory_source):
|
||||
inventory_source.source = 'azure'
|
||||
inventory_source.save()
|
||||
|
||||
assert InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
invsrc.remove_azure_inventory_sources(apps, None)
|
||||
assert not InventorySource.objects.filter(pk=inventory_source.pk).exists()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('vars,id_var,result', [
|
||||
({'foo': {'bar': '1234'}}, 'foo.bar', '1234'),
|
||||
({'cat': 'meow'}, 'cat', 'meow'),
|
||||
|
||||
@@ -43,7 +43,7 @@ def test_basic_parameterization(get, post, user, organization):
|
||||
assert 'url' in response.data['notification_configuration']
|
||||
assert 'headers' in response.data['notification_configuration']
|
||||
assert 'messages' in response.data
|
||||
assert response.data['messages'] == {'started': None, 'success': None, 'error': None}
|
||||
assert response.data['messages'] == {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -19,6 +19,8 @@ from awx.main.models import (
|
||||
Credential
|
||||
)
|
||||
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from crum import impersonate
|
||||
|
||||
|
||||
@@ -252,7 +254,8 @@ class TestJobRelaunchAccess:
|
||||
|
||||
assert 'job_var' in job.launch_config.extra_data
|
||||
assert bob.can_access(Job, 'start', job, validate_license=False)
|
||||
assert not alice.can_access(Job, 'start', job, validate_license=False)
|
||||
with pytest.raises(PermissionDenied):
|
||||
alice.can_access(Job, 'start', job, validate_license=False)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -7,6 +7,8 @@ from awx.main.access import (
|
||||
# WorkflowJobNodeAccess
|
||||
)
|
||||
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from awx.main.models import InventorySource, JobLaunchConfig
|
||||
|
||||
|
||||
@@ -169,7 +171,8 @@ class TestWorkflowJobAccess:
|
||||
wfjt.ask_inventory_on_launch = True
|
||||
wfjt.save()
|
||||
JobLaunchConfig.objects.create(job=workflow_job, inventory=inventory)
|
||||
assert not WorkflowJobAccess(rando).can_start(workflow_job)
|
||||
with pytest.raises(PermissionDenied):
|
||||
WorkflowJobAccess(rando).can_start(workflow_job)
|
||||
inventory.use_role.members.add(rando)
|
||||
assert WorkflowJobAccess(rando).can_start(workflow_job)
|
||||
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
import pytest
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
|
||||
from awx.main.models import (
|
||||
JobTemplate,
|
||||
Project,
|
||||
Inventory,
|
||||
Organization,
|
||||
)
|
||||
|
||||
from awx.main.migrations._scan_jobs import _migrate_scan_job_templates
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def organizations():
|
||||
return [Organization.objects.create(name=u"org-\xe9-{}".format(x)) for x in range(3)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def inventories(organizations):
|
||||
return [Inventory.objects.create(name=u"inv-\xe9-{}".format(x),
|
||||
organization=organizations[x]) for x in range(3)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_templates_scan(inventories):
|
||||
return [JobTemplate.objects.create(name=u"jt-\xe9-scan-{}".format(x),
|
||||
job_type=PERM_INVENTORY_SCAN,
|
||||
inventory=inventories[x]) for x in range(3)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_templates_deploy(inventories):
|
||||
return [JobTemplate.objects.create(name=u"jt-\xe9-deploy-{}".format(x),
|
||||
job_type=PERM_INVENTORY_DEPLOY,
|
||||
inventory=inventories[x]) for x in range(3)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def project_custom(organizations):
|
||||
return Project.objects.create(name=u"proj-\xe9-scan_custom",
|
||||
scm_url='https://giggity.com',
|
||||
organization=organizations[0])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_templates_custom_scan_project(project_custom):
|
||||
return [JobTemplate.objects.create(name=u"jt-\xe9-scan-custom-{}".format(x),
|
||||
project=project_custom,
|
||||
job_type=PERM_INVENTORY_SCAN) for x in range(3)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_scan_no_org():
|
||||
return JobTemplate.objects.create(name=u"jt-\xe9-scan-no-org",
|
||||
job_type=PERM_INVENTORY_SCAN)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_jobs_migration(job_templates_scan, job_templates_deploy, job_templates_custom_scan_project, project_custom, job_template_scan_no_org):
|
||||
_migrate_scan_job_templates(apps)
|
||||
|
||||
# Ensure there are no scan job templates after the migration
|
||||
assert 0 == JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN).count()
|
||||
|
||||
# Ensure special No Organization proj created
|
||||
# And No Organization project is associated with correct jt
|
||||
proj = Project.objects.get(name="Tower Fact Scan - No Organization")
|
||||
assert proj.id == JobTemplate.objects.get(id=job_template_scan_no_org.id).project.id
|
||||
|
||||
# Ensure per-org projects were created
|
||||
projs = Project.objects.filter(name__startswith="Tower Fact Scan")
|
||||
assert projs.count() == 4
|
||||
|
||||
# Ensure scan job templates with Tower project are migrated
|
||||
for i, jt_old in enumerate(job_templates_scan):
|
||||
jt = JobTemplate.objects.get(id=jt_old.id)
|
||||
assert PERM_INVENTORY_DEPLOY == jt.job_type
|
||||
assert jt.use_fact_cache is True
|
||||
assert projs[i] == jt.project
|
||||
|
||||
# Ensure scan job templates with custom projects are migrated
|
||||
for jt_old in job_templates_custom_scan_project:
|
||||
jt = JobTemplate.objects.get(id=jt_old.id)
|
||||
assert PERM_INVENTORY_DEPLOY == jt.job_type
|
||||
assert jt.use_fact_cache is True
|
||||
assert project_custom == jt.project
|
||||
|
||||
# Ensure other job templates aren't touched
|
||||
for jt_old in job_templates_deploy:
|
||||
jt = JobTemplate.objects.get(id=jt_old.id)
|
||||
assert PERM_INVENTORY_DEPLOY == jt.job_type
|
||||
assert jt.project is None
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.