Mirror of https://github.com/ansible/awx.git (synced 2026-02-08 13:04:43 -03:30)
Compare commits
919 Commits
.gitignore (vendored), 9 changed lines

@@ -133,4 +133,13 @@ awx/lib/site-packages
venv/*
use_dev_supervisor.txt

# Ansible module tests
/awx_collection_test_venv/
/awx_collection/*.tar.gz
/awx_collection/galaxy.yml
/sanity/

.idea/*
*.unison.tmp
*.#
@@ -156,8 +156,8 @@ If you start a second terminal session, you can take a look at the running containers

$ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
aa4a75d6d77b gcr.io/ansible-tower-engineering/awx_devel:devel "/tini -- /bin/sh ..." 23 seconds ago Up 15 seconds 0.0.0.0:5555->5555/tcp, 0.0.0.0:6899-6999->6899-6999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 22/tcp, 0.0.0.0:8080->8080/tcp tools_awx_1
e4c0afeb548c postgres:9.6 "docker-entrypoint..." 26 seconds ago Up 23 seconds 5432/tcp tools_postgres_1
aa4a75d6d77b gcr.io/ansible-tower-engineering/awx_devel:devel "/tini -- /bin/sh ..." 23 seconds ago Up 15 seconds 0.0.0.0:5555->5555/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 22/tcp, 0.0.0.0:8080->8080/tcp tools_awx_1
e4c0afeb548c postgres:10 "docker-entrypoint..." 26 seconds ago Up 23 seconds 5432/tcp tools_postgres_1
0089699d5afd tools_logstash "/docker-entrypoin..." 26 seconds ago Up 25 seconds tools_logstash_1
4d4ff0ced266 memcached:alpine "docker-entrypoint..." 26 seconds ago Up 25 seconds 0.0.0.0:11211->11211/tcp tools_memcached_1
92842acd64cd rabbitmq:3-management "docker-entrypoint..." 26 seconds ago Up 24 seconds 4369/tcp, 5671-5672/tcp, 15671/tcp, 25672/tcp, 0.0.0.0:15672->15672/tcp tools_rabbitmq_1
INSTALL.md, 10 changed lines

@@ -120,6 +120,8 @@ If these variables are present then all deployments will use these hosted images

To complete a deployment to OpenShift, you will obviously need access to an OpenShift cluster. For demo and testing purposes, you can use [Minishift](https://github.com/minishift/minishift) to create a single node cluster running inside a virtual machine.

When using OpenShift for deploying AWX make sure you have correct privileges to add the security context 'privileged', otherwise the installation will fail. The privileged context is needed because of the use of [the bubblewrap tool](https://github.com/containers/bubblewrap) to add an additional layer of security when using containers.

You will also need to have the `oc` command in your PATH. The `install.yml` playbook will call out to `oc` when logging into, and creating objects on the cluster.

The default resource requests per-deployment requires:

@@ -193,7 +195,7 @@ $ eval $(minishift docker-env)

By default, AWX will deploy a PostgreSQL pod inside of your cluster. You will need to create a [Persistent Volume Claim](https://docs.openshift.org/latest/dev_guide/persistent_volumes.html) which is named `postgresql` by default, and can be overridden by setting the `openshift_pg_pvc_name` variable. For testing and demo purposes, you may set `openshift_pg_emptydir=yes`.

If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information. When setting `pg_hostname` the installer will assume you have configured the database in that location and will not launch the postgresql pod.
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information. When setting `pg_hostname` the installer will assume you have configured the database in that location and will not launch the postgresql pod.

### Start the build

@@ -456,6 +458,10 @@ Before starting the build process, review the [inventory](./installer/inventory)

> When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).

*custom_venv_dir*

> Adds the custom venv environments from the local host to be passed into the containers at install.

*ca_trust_dir*

> If you're using a non trusted CA, provide a path where the untrusted Certs are stored on your Host.

@@ -503,7 +509,7 @@ If you wish to tag and push built images to a Docker registry, set the following

AWX requires access to a PostgreSQL database, and by default, one will be created and deployed in a container, and data will be persisted to a host volume. In this scenario, you must set the value of `postgres_data_dir` to a path that can be mounted to the container. When the container is stopped, the database files will still exist in the specified path.

If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information.
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information.

### Start the build
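Before kicking off an external-database install, it can save a failed run to confirm that the pg_* values above actually reach the database. A minimal pre-flight check using psycopg2; the host, credentials, and database name are placeholders mirroring the inventory variables, not values from this repository:

import psycopg2

# Values mirror the inventory settings: pg_hostname, pg_port, pg_username,
# pg_password, pg_database.
conn = psycopg2.connect(
    host='db.example.com',
    port=5432,
    user='awx',
    password='awxpass',
    dbname='awx',
)
with conn.cursor() as cur:
    cur.execute('SELECT version()')
    print(cur.fetchone()[0])
conn.close()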
Makefile, 59 changed lines

@@ -18,6 +18,7 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
COMPOSE_HOST ?= $(shell hostname)

VENV_BASE ?= /venv
COLLECTION_VENV ?= /awx_devel/awx_collection_test_venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

@@ -99,20 +100,22 @@ clean-languages:
    find . -type f -regex ".*\.mo$$" -delete

# Remove temporary build files, compiled Python files.
clean: clean-ui clean-dist
clean: clean-ui clean-api clean-dist
    rm -rf awx/public
    rm -rf awx/lib/site-packages
    rm -rf awx/job_status
    rm -rf awx/job_output
    rm -rf reports
    rm -f awx/awx_test.sqlite3*
    rm -rf requirements/vendor
    rm -rf tmp
    rm -rf $(I18N_FLAG_FILE)
    mkdir tmp

clean-api:
    rm -rf build $(NAME)-$(VERSION) *.egg-info
    find . -type f -regex ".*\.py[co]$$" -delete
    find . -type d -name "__pycache__" -delete
    rm -f awx/awx_test.sqlite3*
    rm -rf requirements/vendor

# convenience target to assert environment variables are defined
guard-%:

@@ -186,14 +189,14 @@ requirements_awx: virtualenv_awx
    cat requirements/requirements.txt requirements/requirements_git.txt | $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
    fi
    echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
    #$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt
    $(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt

requirements_awx_dev:
    $(VENV_BASE)/awx/bin/pip install -r requirements/requirements_dev.txt

requirements: requirements_ansible requirements_awx

requirements_dev: requirements requirements_awx_dev requirements_ansible_dev
requirements_dev: requirements_awx requirements_ansible_py3 requirements_awx_dev requirements_ansible_dev

requirements_test: requirements

@@ -375,6 +378,38 @@ test:
    cd awxkit && $(VENV_BASE)/awx/bin/tox -re py2,py3
    awx-manage check_migrations --dry-run --check -n 'vNNN_missing_migration_file'

prepare_collection_venv:
    rm -rf $(COLLECTION_VENV)
    mkdir $(COLLECTION_VENV)
    ln -s /usr/lib/python2.7/site-packages/ansible $(COLLECTION_VENV)/ansible
    $(VENV_BASE)/awx/bin/pip install --target=$(COLLECTION_VENV) git+https://github.com/ansible/tower-cli.git

COLLECTION_TEST_DIRS ?= awx_collection/test/awx
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx

test_collection:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    PYTHONPATH=$(COLLECTION_VENV):/awx_devel/awx_collection:$PYTHONPATH py.test $(COLLECTION_TEST_DIRS)

flake8_collection:
    flake8 awx_collection/ # Different settings, in main exclude list

test_collection_all: prepare_collection_venv test_collection flake8_collection

test_collection_sanity:
    rm -rf sanity
    mkdir -p sanity/ansible_collections/awx
    cp -Ra awx_collection sanity/ansible_collections/awx/awx # symlinks do not work
    cd sanity/ansible_collections/awx/awx && git init && git add . # requires both this file structure and a git repo, so there you go
    cd sanity/ansible_collections/awx/awx && ansible-test sanity --test validate-modules

build_collection:
    ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
    ansible-galaxy collection build awx_collection --output-path=awx_collection

test_unit:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \

@@ -516,6 +551,12 @@ jshint: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) run --prefix awx/ui jshint
    $(NPM_BIN) run --prefix awx/ui lint

ui-zuul-lint-and-test: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) run --prefix awx/ui jshint
    $(NPM_BIN) run --prefix awx/ui lint
    $(NPM_BIN) --prefix awx/ui run test:ci
    $(NPM_BIN) --prefix awx/ui run unit

# END UI TASKS
# --------------------------------------

@@ -531,6 +572,12 @@ ui-next-test:
    $(NPM_BIN) --prefix awx/ui_next install
    $(NPM_BIN) run --prefix awx/ui_next test

ui-next-zuul-lint-and-test:
    $(NPM_BIN) --prefix awx/ui_next install
    $(NPM_BIN) run --prefix awx/ui_next lint
    $(NPM_BIN) run --prefix awx/ui_next prettier-check
    $(NPM_BIN) run --prefix awx/ui_next test

# END UI NEXT TASKS
# --------------------------------------

@@ -648,7 +695,7 @@ clean-elk:
    docker rm tools_kibana_1

psql-container:
    docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
    docker run -it --net tools_default --rm postgres:10 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
    @echo "awx: $(VERSION)"
@@ -82,6 +82,16 @@ def find_commands(management_dir):
    return commands


def oauth2_getattribute(self, attr):
    # Custom method to override
    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
    from django.conf import settings
    val = settings.OAUTH2_PROVIDER.get(attr)
    if val is None:
        val = object.__getattribute__(self, attr)
    return val


def prepare_env():
    # Update the default settings environment variable based on current mode.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)

@@ -93,6 +103,12 @@ def prepare_env():
    # Monkeypatch Django find_commands to also work with .pyc files.
    import django.core.management
    django.core.management.find_commands = find_commands

    # Monkeypatch Oauth2 toolkit settings class to check for settings
    # in django.conf settings each time, not just once during import
    import oauth2_provider.settings
    oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute

    # Use the AWX_TEST_DATABASE_* environment variables to specify the test
    # database settings to use when management command is run as an external
    # program via unit tests.
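The override above makes the OAuth2 toolkit consult django.conf.settings on every attribute access instead of caching values at import time. A minimal illustration of the effect, assuming a configured AWX/Django environment in which prepare_env() has already run; the value 3600 is arbitrary:

from django.conf import settings
from oauth2_provider.settings import oauth2_settings

# Change the live Django setting; no process restart or re-import needed.
settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'] = 3600

# Because __getattribute__ now reads settings.OAUTH2_PROVIDER each time,
# the toolkit sees the new value immediately.
assert oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS == 3600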
@@ -38,12 +38,15 @@ register(
    'OAUTH2_PROVIDER',
    field_class=OAuth2ProviderField,
    default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
             'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600},
             'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
             'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS},
    label=_('OAuth 2 Timeout Settings'),
    help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
                '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
                'of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
                'authorization codes in the number of seconds.'),
                'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
                'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
                'the duration of refresh tokens, after expired access tokens, '
                'in the number of seconds.'),
    category=_('Authentication'),
    category_slug='authentication',
)

@@ -80,7 +80,7 @@ class OAuth2ProviderField(fields.DictField):
    default_error_messages = {
        'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
    }
    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS'}
    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
    child = fields.IntegerField(min_value=1)

    def to_internal_value(self, data):
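With REFRESH_TOKEN_EXPIRE_SECONDS accepted as a key, the whole setting can be written as a plain dictionary of integer seconds. The values below are illustrative only, not defaults from this change:

# Illustrative values; all three keys pass OAuth2ProviderField validation.
OAUTH2_PROVIDER = {
    'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000,      # access tokens: one year
    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,     # authorization codes: ten minutes
    'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000,      # refresh tokens after access-token expiry: about a month
}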
@@ -126,7 +126,7 @@ class FieldLookupBackend(BaseFilterBackend):
    '''

    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
                      'search', 'type', 'host_filter')
                      'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')

    SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
                         'startswith', 'istartswith', 'endswith', 'iendswith',
@@ -92,7 +92,7 @@ class LoggedLoginView(auth_views.LoginView):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        if request.user.is_authenticated:
            logger.info(smart_text(u"User {} logged in.".format(self.request.user.username)))
            logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie('userLoggedIn', 'true')
            current_user = UserSerializer(self.request.user)
            current_user = smart_text(JSONRenderer().render(current_user.data))

@@ -205,6 +205,9 @@ class APIView(views.APIView):
            response['X-API-Query-Count'] = len(q_times)
            response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

        if getattr(self, 'deprecated', False):
            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."' # noqa

        return response

    def get_authenticate_header(self, request):

@@ -489,9 +492,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        qs = self.request.user.get_queryset(self.model).distinct()
        sublist_qs = getattrd(parent, self.relationship).distinct()
        sublist_qs = self.get_sublist_queryset(parent)
        return qs & sublist_qs

    def get_sublist_queryset(self, parent):
        return getattrd(parent, self.relationship).distinct()


class DestroyAPIView(generics.DestroyAPIView):
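get_sublist_queryset() gives subclasses a single hook for narrowing the related queryset before it is intersected with the requesting user's visible queryset in get_queryset(). A hypothetical subclass, with the filter made up purely for illustration:

class EnabledHostsSubListAPIView(SubListAPIView):
    # Hypothetical view: only expose enabled objects from the parent relationship.
    def get_sublist_queryset(self, parent):
        return super(EnabledHostsSubListAPIView, self).get_sublist_queryset(parent).filter(enabled=True)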
@@ -568,7 +574,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
                            status=status.HTTP_400_BAD_REQUEST)

        # Verify we have permission to add the object as given.
        if not request.user.can_access(self.model, 'add', serializer.initial_data):
        if not request.user.can_access(self.model, 'add', serializer.validated_data):
            raise PermissionDenied()

        # save the object through the serializer, reload and returned the saved
@@ -20,8 +20,9 @@ from rest_framework.fields import JSONField as DRFJSONField
from rest_framework.request import clone_request

# AWX
from awx.main.fields import JSONField
from awx.main.fields import JSONField, ImplicitRoleField
from awx.main.models import InventorySource, NotificationTemplate
from awx.main.scheduler.kubernetes import PodManager


class Metadata(metadata.SimpleMetadata):

@@ -157,9 +158,16 @@ class Metadata(metadata.SimpleMetadata):
            isinstance(field, JSONField) or
            isinstance(model_field, JSONField) or
            isinstance(field, DRFJSONField) or
            isinstance(getattr(field, 'model_field', None), JSONField)
            isinstance(getattr(field, 'model_field', None), JSONField) or
            field.field_name == 'credential_passwords'
        ):
            field_info['type'] = 'json'
        elif (
            isinstance(field, ManyRelatedField) and
            field.field_name == 'credentials'
            # launch-time credentials
        ):
            field_info['type'] = 'list_of_ids'
        elif isinstance(model_field, BooleanField):
            field_info['type'] = 'boolean'

@@ -200,6 +208,9 @@ class Metadata(metadata.SimpleMetadata):
                if not isinstance(meta, dict):
                    continue

                if field == "pod_spec_override":
                    meta['default'] = PodManager().pod_definition

                # Add type choices if available from the serializer.
                if field == 'type' and hasattr(serializer, 'get_type_choices'):
                    meta['choices'] = serializer.get_type_choices()

@@ -252,6 +263,16 @@ class Metadata(metadata.SimpleMetadata):
        if getattr(view, 'related_search_fields', None):
            metadata['related_search_fields'] = view.related_search_fields

        # include role names in metadata
        roles = []
        model = getattr(view, 'model', None)
        if model:
            for field in model._meta.get_fields():
                if type(field) is ImplicitRoleField:
                    roles.append(field.name)
        if len(roles) > 0:
            metadata['object_roles'] = roles

        from rest_framework import generics
        if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
            metadata['max_page_size'] = view.paginator.max_page_size
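With ImplicitRoleField names collected into object_roles, an OPTIONS call against a role-bearing resource now advertises its roles, and paginated list views also expose max_page_size. A hypothetical check with the requests library; the URL, credentials, and exact role names are placeholders:

import requests

resp = requests.options(
    'https://awx.example.com/api/v2/job_templates/',
    auth=('admin', 'password'),
)
meta = resp.json()
print(meta.get('object_roles'))   # e.g. ['admin_role', 'execute_role', 'read_role'] for the view's model
print(meta.get('max_page_size'))  # present for list views that have a paginator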
@@ -3,14 +3,28 @@

# Django REST Framework
from django.conf import settings
from django.core.paginator import Paginator as DjangoPaginator
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import replace_query_param


class DisabledPaginator(DjangoPaginator):

    @property
    def num_pages(self):
        return 1

    @property
    def count(self):
        return 200


class Pagination(pagination.PageNumberPagination):

    page_size_query_param = 'page_size'
    max_page_size = settings.MAX_PAGE_SIZE
    count_disabled = False

    def get_next_link(self):
        if not self.page.has_next():

@@ -39,3 +53,17 @@ class Pagination(pagination.PageNumberPagination):
                         for pl in context['page_links']]

        return context

    def paginate_queryset(self, queryset, request, **kwargs):
        self.count_disabled = 'count_disabled' in request.query_params
        try:
            if self.count_disabled:
                self.django_paginator_class = DisabledPaginator
            return super(Pagination, self).paginate_queryset(queryset, request, **kwargs)
        finally:
            self.django_paginator_class = DjangoPaginator

    def get_paginated_response(self, data):
        if self.count_disabled:
            return Response({'results': data})
        return super(Pagination, self).get_paginated_response(data)
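A hypothetical request showing the new behaviour: when count_disabled appears in the query string, DisabledPaginator is swapped in so the usual count query is avoided and the paginated response carries only results. The URL, endpoint, and credentials are placeholders:

import requests

resp = requests.get(
    'https://awx.example.com/api/v2/job_events/',
    params={'count_disabled': 1, 'page_size': 200},
    auth=('admin', 'password'),
)
payload = resp.json()
# Only 'results' is returned; 'count', 'next' and 'previous' are omitted.
print(sorted(payload.keys()))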
@@ -249,3 +249,8 @@ class InstanceGroupTowerPermission(ModelAccessPermission):
        if request.method == 'DELETE' and obj.name == "tower":
            return False
        return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj)


class WebhookKeyPermission(permissions.BasePermission):
    def has_object_permission(self, request, view, obj):
        return request.user.can_access(view.model, 'admin', obj, request.data)
@@ -45,7 +45,6 @@ from polymorphic.models import PolymorphicModel
from awx.main.access import get_user_capabilities
from awx.main.constants import (
    SCHEDULEABLE_PROVIDERS,
    ANSI_SGR_PATTERN,
    ACTIVE_STATES,
    CENSOR_VALUE,
)

@@ -70,7 +69,8 @@ from awx.main.utils import (
    get_type_for_model, get_model_for_type,
    camelcase_to_underscore, getattrd, parse_yaml_or_json,
    has_model_field_prefetched, extract_ansible_vars, encrypt_dict,
    prefetch_page_capabilities, get_external_account)
    prefetch_page_capabilities, get_external_account, truncate_stdout,
)
from awx.main.utils.filters import SmartFilter
from awx.main.redact import UriCleaner, REPLACE_STR

@@ -116,7 +116,7 @@ SUMMARIZABLE_FK_FIELDS = {
    'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
    'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
    'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
    'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
    'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
    'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type'),
    'job_template': DEFAULT_SUMMARY_FIELDS,
    'workflow_job_template': DEFAULT_SUMMARY_FIELDS,

@@ -135,10 +135,12 @@ SUMMARIZABLE_FK_FIELDS = {
    'source_script': ('name', 'description'),
    'role': ('id', 'role_field'),
    'notification_template': DEFAULT_SUMMARY_FIELDS,
    'instance_group': {'id', 'name', 'controller_id'},
    'instance_group': ('id', 'name', 'controller_id', 'is_containerized'),
    'insights_credential': DEFAULT_SUMMARY_FIELDS,
    'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
    'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
    'webhook_credential': DEFAULT_SUMMARY_FIELDS,
    'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
}

@@ -1261,6 +1263,7 @@ class OrganizationSerializer(BaseSerializer):
            notification_templates_started = self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),
            notification_templates_success = self.reverse('api:organization_notification_templates_success_list', kwargs={'pk': obj.pk}),
            notification_templates_error = self.reverse('api:organization_notification_templates_error_list', kwargs={'pk': obj.pk}),
            notification_templates_approvals = self.reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:organization_object_roles_list', kwargs={'pk': obj.pk}),
            access_list = self.reverse('api:organization_access_list', kwargs={'pk': obj.pk}),
            instance_groups = self.reverse('api:organization_instance_groups_list', kwargs={'pk': obj.pk}),

@@ -2513,7 +2516,7 @@ class CredentialSerializer(BaseSerializer):

    class Meta:
        model = Credential
        fields = ('*', 'organization', 'credential_type', 'inputs', 'kind', 'cloud')
        fields = ('*', 'organization', 'credential_type', 'inputs', 'kind', 'cloud', 'kubernetes')
        extra_kwargs = {
            'credential_type': {
                'label': _('Credential Type'),

@@ -2825,6 +2828,25 @@ class JobTemplateMixin(object):
        d['recent_jobs'] = self._recent_jobs(obj)
        return d

    def validate(self, attrs):
        webhook_service = attrs.get('webhook_service', getattr(self.instance, 'webhook_service', None))
        webhook_credential = attrs.get('webhook_credential', getattr(self.instance, 'webhook_credential', None))

        if webhook_credential:
            if webhook_credential.credential_type.kind != 'token':
                raise serializers.ValidationError({
                    'webhook_credential': _("Must be a Personal Access Token."),
                })

            msg = {'webhook_credential': _("Must match the selected webhook service.")}
            if webhook_service:
                if webhook_credential.credential_type.namespace != '{}_token'.format(webhook_service):
                    raise serializers.ValidationError(msg)
            else:
                raise serializers.ValidationError(msg)

        return super().validate(attrs)


class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobOptionsSerializer):
    show_capabilities = ['start', 'schedule', 'copy', 'edit', 'delete']
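A hypothetical request illustrating the validation above: the webhook credential must be a personal access token whose credential type namespace matches '<webhook_service>_token'. The URL, credentials, and ids are placeholders:

import requests

resp = requests.patch(
    'https://awx.example.com/api/v2/job_templates/42/',
    auth=('admin', 'password'),
    json={'webhook_service': 'github', 'webhook_credential': 7},
)
# Expect HTTP 400 if credential 7 is not a GitHub personal access token type:
# {"webhook_credential": ["Must match the selected webhook service."]}
print(resp.status_code, resp.json())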
@@ -2837,30 +2859,39 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO

    class Meta:
        model = JobTemplate
        fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
                  'ask_limit_on_launch', 'ask_tags_on_launch',
                  'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
                  'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
                  'allow_simultaneous', 'custom_virtualenv', 'job_slice_count')
        fields = (
            '*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch',
            'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
            'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
            'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
            'become_enabled', 'diff_mode', 'allow_simultaneous', 'custom_virtualenv',
            'job_slice_count', 'webhook_service', 'webhook_credential',
        )

    def get_related(self, obj):
        res = super(JobTemplateSerializer, self).get_related(obj)
        res.update(dict(
            jobs = self.reverse('api:job_template_jobs_list', kwargs={'pk': obj.pk}),
            schedules = self.reverse('api:job_template_schedules_list', kwargs={'pk': obj.pk}),
            activity_stream = self.reverse('api:job_template_activity_stream_list', kwargs={'pk': obj.pk}),
            launch = self.reverse('api:job_template_launch', kwargs={'pk': obj.pk}),
            notification_templates_started = self.reverse('api:job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
            notification_templates_success = self.reverse('api:job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
            notification_templates_error = self.reverse('api:job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
            access_list = self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
            survey_spec = self.reverse('api:job_template_survey_spec', kwargs={'pk': obj.pk}),
            labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
            instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
            slice_workflow_jobs = self.reverse('api:job_template_slice_workflow_jobs_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
        ))
        res.update(
            jobs=self.reverse('api:job_template_jobs_list', kwargs={'pk': obj.pk}),
            schedules=self.reverse('api:job_template_schedules_list', kwargs={'pk': obj.pk}),
            activity_stream=self.reverse('api:job_template_activity_stream_list', kwargs={'pk': obj.pk}),
            launch=self.reverse('api:job_template_launch', kwargs={'pk': obj.pk}),
            webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk}),
            webhook_receiver=(
                self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
                             kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk})
                if obj.webhook_service else ''
            ),
            notification_templates_started=self.reverse('api:job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
            notification_templates_success=self.reverse('api:job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
            notification_templates_error=self.reverse('api:job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
            access_list=self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
            survey_spec=self.reverse('api:job_template_survey_spec', kwargs={'pk': obj.pk}),
            labels=self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
            object_roles=self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
            instance_groups=self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
            slice_workflow_jobs=self.reverse('api:job_template_slice_workflow_jobs_list', kwargs={'pk': obj.pk}),
            copy=self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
        )
        if obj.host_config_key:
            res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
        return res
@@ -2888,7 +2919,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
    def validate_extra_vars(self, value):
        return vars_validate_or_raise(value)


    def get_summary_fields(self, obj):
        summary_fields = super(JobTemplateSerializer, self).get_summary_fields(obj)
        all_creds = []

@@ -2929,9 +2959,11 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):

    class Meta:
        model = Job
        fields = ('*', 'job_template', 'passwords_needed_to_start',
                  'allow_simultaneous', 'artifacts', 'scm_revision',
                  'instance_group', 'diff_mode', 'job_slice_number', 'job_slice_count')
        fields = (
            '*', 'job_template', 'passwords_needed_to_start', 'allow_simultaneous',
            'artifacts', 'scm_revision', 'instance_group', 'diff_mode', 'job_slice_number',
            'job_slice_count', 'webhook_service', 'webhook_credential', 'webhook_guid',
        )

    def get_related(self, obj):
        res = super(JobSerializer, self).get_related(obj)
@@ -3314,29 +3346,42 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
        'admin', 'execute',
        {'copy': 'organization.workflow_admin'}
    ]
    limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
    scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)

    class Meta:
        model = WorkflowJobTemplate
        fields = ('*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
                  'ask_variables_on_launch', 'inventory', 'ask_inventory_on_launch',)
        fields = (
            '*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
            'ask_variables_on_launch', 'inventory', 'limit', 'scm_branch',
            'ask_inventory_on_launch', 'ask_scm_branch_on_launch', 'ask_limit_on_launch',
            'webhook_service', 'webhook_credential',
        )

    def get_related(self, obj):
        res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
        res.update(dict(
        res.update(
            workflow_jobs = self.reverse('api:workflow_job_template_jobs_list', kwargs={'pk': obj.pk}),
            schedules = self.reverse('api:workflow_job_template_schedules_list', kwargs={'pk': obj.pk}),
            launch = self.reverse('api:workflow_job_template_launch', kwargs={'pk': obj.pk}),
            webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk}),
            webhook_receiver=(
                self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
                             kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk})
                if obj.webhook_service else ''
            ),
            workflow_nodes = self.reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': obj.pk}),
            labels = self.reverse('api:workflow_job_template_label_list', kwargs={'pk': obj.pk}),
            activity_stream = self.reverse('api:workflow_job_template_activity_stream_list', kwargs={'pk': obj.pk}),
            notification_templates_started = self.reverse('api:workflow_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
            notification_templates_success = self.reverse('api:workflow_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
            notification_templates_error = self.reverse('api:workflow_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
            notification_templates_approvals = self.reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
            access_list = self.reverse('api:workflow_job_template_access_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:workflow_job_template_object_roles_list', kwargs={'pk': obj.pk}),
            survey_spec = self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
        ))
        )
        if obj.organization:
            res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
        return res
@@ -3344,6 +3389,22 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
    def validate_extra_vars(self, value):
        return vars_validate_or_raise(value)

    def validate(self, attrs):
        attrs = super(WorkflowJobTemplateSerializer, self).validate(attrs)

        # process char_prompts, these are not direct fields on the model
        mock_obj = self.Meta.model()
        for field_name in ('scm_branch', 'limit'):
            if field_name in attrs:
                setattr(mock_obj, field_name, attrs[field_name])
                attrs.pop(field_name)

        # Model `.save` needs the container dict, not the pseudo fields
        if mock_obj.char_prompts:
            attrs['char_prompts'] = mock_obj.char_prompts

        return attrs


class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
    '''

@@ -3356,13 +3417,16 @@ class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):


class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
    limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
    scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)

    class Meta:
        model = WorkflowJob
        fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',
                  'job_template', 'is_sliced_job',
                  '-execution_node', '-event_processing_finished', '-controller_node',
                  'inventory',)
        fields = (
            '*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template',
            'is_sliced_job', '-execution_node', '-event_processing_finished', '-controller_node',
            'inventory', 'limit', 'scm_branch', 'webhook_service', 'webhook_credential', 'webhook_guid',
        )

    def get_related(self, obj):
        res = super(WorkflowJobSerializer, self).get_related(obj)

@@ -3438,6 +3502,8 @@ class WorkflowApprovalSerializer(UnifiedJobSerializer):
                                                              kwargs={'pk': obj.workflow_approval_template.pk})
        res['approve'] = self.reverse('api:workflow_approval_approve', kwargs={'pk': obj.pk})
        res['deny'] = self.reverse('api:workflow_approval_deny', kwargs={'pk': obj.pk})
        if obj.approved_or_denied_by:
            res['approved_or_denied_by'] = self.reverse('api:user_detail', kwargs={'pk': obj.approved_or_denied_by.pk})
        return res


@@ -3469,7 +3535,7 @@ class WorkflowApprovalTemplateSerializer(UnifiedJobTemplateSerializer):
        if 'last_job' in res:
            del res['last_job']

        res.update(dict(jobs = self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}),))
        res.update(jobs = self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}))
        return res
@@ -3596,7 +3662,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
|
||||
# Model `.save` needs the container dict, not the psuedo fields
|
||||
# Model `.save` needs the container dict, not the pseudo fields
|
||||
if mock_obj.char_prompts:
|
||||
attrs['char_prompts'] = mock_obj.char_prompts
|
||||
|
||||
@@ -3788,25 +3854,17 @@ class JobEventSerializer(BaseSerializer):
return d

def to_representation(self, obj):
ret = super(JobEventSerializer, self).to_representation(obj)
# Show full stdout for event detail view, truncate only for list view.
if hasattr(self.context.get('view', None), 'retrieve'):
return ret
data = super(JobEventSerializer, self).to_representation(obj)
# Show full stdout for playbook_on_* events.
if obj and obj.event.startswith('playbook_on'):
return ret
return data
# If the view logic says to not trunctate (request was to the detail view or a param was used)
if self.context.get('no_truncate', False):
return data
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
set_count = 0
reset_count = 0
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
if m.string[m.start():m.end()] == u'\u001b[0m':
reset_count += 1
else:
set_count += 1
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
return ret
if 'stdout' in data:
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
return data
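The hunk above swaps the serializer's inline truncation for a shared truncate_stdout() helper. Only the call truncate_stdout(data['stdout'], max_bytes) is visible in this diff; the sketch below is a guess at that helper, reconstructed from the inline logic it replaces (truncate to max_bytes, then re-balance ANSI SGR escapes). The regex is an assumption.

import re

# Assumed SGR matcher; the real ANSI_SGR_PATTERN is defined elsewhere in awx.main.
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')


def truncate_stdout(stdout, size):
    # Nothing to do when truncation is disabled or the text already fits.
    if size <= 0 or len(stdout) <= size:
        return stdout
    # Truncate and mark the cut with an ellipsis, as the removed inline code did.
    stdout = stdout[:size - 1] + u'\u2026'
    # Close any colour span the cut left open so it does not bleed into
    # whatever is rendered after this event's stdout.
    set_count = reset_count = 0
    for m in ANSI_SGR_PATTERN.finditer(stdout):
        if m.group() == u'\u001b[0m':
            reset_count += 1
        else:
            set_count += 1
    return stdout + u'\u001b[0m' * (set_count - reset_count)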
class JobEventWebSocketSerializer(JobEventSerializer):
|
||||
@@ -3901,22 +3959,14 @@ class AdHocCommandEventSerializer(BaseSerializer):
|
||||
return res
|
||||
|
||||
def to_representation(self, obj):
|
||||
ret = super(AdHocCommandEventSerializer, self).to_representation(obj)
|
||||
# Show full stdout for event detail view, truncate only for list view.
|
||||
if hasattr(self.context.get('view', None), 'retrieve'):
|
||||
return ret
|
||||
data = super(AdHocCommandEventSerializer, self).to_representation(obj)
|
||||
# If the view logic says to not trunctate (request was to the detail view or a param was used)
|
||||
if self.context.get('no_truncate', False):
|
||||
return data
|
||||
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
|
||||
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
|
||||
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
|
||||
set_count = 0
|
||||
reset_count = 0
|
||||
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
|
||||
if m.string[m.start():m.end()] == u'\u001b[0m':
|
||||
reset_count += 1
|
||||
else:
|
||||
set_count += 1
|
||||
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
|
||||
return ret
|
||||
if 'stdout' in data:
|
||||
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
|
||||
return data
|
||||
|
||||
|
||||
class AdHocCommandEventWebSocketSerializer(AdHocCommandEventSerializer):
|
||||
@@ -4180,12 +4230,16 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
|
||||
queryset=Inventory.objects.all(),
|
||||
required=False, write_only=True
|
||||
)
|
||||
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
workflow_job_template_data = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('ask_inventory_on_launch', 'can_start_without_user_input', 'defaults', 'extra_vars',
|
||||
'inventory', 'survey_enabled', 'variables_needed_to_start',
|
||||
fields = ('ask_inventory_on_launch', 'ask_limit_on_launch', 'ask_scm_branch_on_launch',
|
||||
'can_start_without_user_input', 'defaults', 'extra_vars',
|
||||
'inventory', 'limit', 'scm_branch',
|
||||
'survey_enabled', 'variables_needed_to_start',
|
||||
'node_templates_missing', 'node_prompts_rejected',
|
||||
'workflow_job_template_data', 'survey_enabled', 'ask_variables_on_launch')
|
||||
read_only_fields = ('ask_inventory_on_launch', 'ask_variables_on_launch')
|
||||
@@ -4225,9 +4279,14 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
|
||||
|
||||
WFJT_extra_vars = template.extra_vars
|
||||
WFJT_inventory = template.inventory
|
||||
WFJT_limit = template.limit
|
||||
WFJT_scm_branch = template.scm_branch
|
||||
super(WorkflowJobLaunchSerializer, self).validate(attrs)
|
||||
template.extra_vars = WFJT_extra_vars
|
||||
template.inventory = WFJT_inventory
|
||||
template.limit = WFJT_limit
|
||||
template.scm_branch = WFJT_scm_branch
|
||||
|
||||
return accepted
|
||||
|
||||
|
||||
@@ -4279,13 +4338,30 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
error_list = []
|
||||
collected_messages = []
|
||||
|
||||
def check_messages(messages):
|
||||
for message_type in messages:
|
||||
if message_type not in ('message', 'body'):
|
||||
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
|
||||
continue
|
||||
message = messages[message_type]
|
||||
if message is None:
|
||||
continue
|
||||
if not isinstance(message, str):
|
||||
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
|
||||
continue
|
||||
if message_type == 'message':
|
||||
if '\n' in message:
|
||||
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
|
||||
continue
|
||||
collected_messages.append(message)
|
||||
|
||||
# Validate structure / content types
|
||||
if not isinstance(messages, dict):
|
||||
error_list.append(_("Expected dict for 'messages' field, found {}".format(type(messages))))
|
||||
else:
|
||||
for event in messages:
|
||||
if event not in ['started', 'success', 'error']:
|
||||
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', or 'error'").format(event))
|
||||
if event not in ('started', 'success', 'error', 'workflow_approval'):
|
||||
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', 'error', or 'workflow_approval'").format(event))
|
||||
continue
|
||||
event_messages = messages[event]
|
||||
if event_messages is None:
|
||||
@@ -4293,21 +4369,21 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
if not isinstance(event_messages, dict):
|
||||
error_list.append(_("Expected dict for event '{}', found {}").format(event, type(event_messages)))
|
||||
continue
|
||||
for message_type in event_messages:
|
||||
if message_type not in ['message', 'body']:
|
||||
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
|
||||
continue
|
||||
message = event_messages[message_type]
|
||||
if message is None:
|
||||
continue
|
||||
if not isinstance(message, str):
|
||||
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
|
||||
continue
|
||||
if message_type == 'message':
|
||||
if '\n' in message:
|
||||
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
|
||||
if event == 'workflow_approval':
|
||||
for subevent in event_messages:
|
||||
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
|
||||
error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
|
||||
"'running', 'approved', 'timed_out', or 'denied'").format(subevent))
|
||||
continue
|
||||
collected_messages.append(message)
|
||||
subevent_messages = event_messages[subevent]
|
||||
if subevent_messages is None:
|
||||
continue
|
||||
if not isinstance(subevent_messages, dict):
|
||||
error_list.append(_("Expected dict for workflow approval event '{}', found {}").format(subevent, type(subevent_messages)))
|
||||
continue
|
||||
check_messages(subevent_messages)
|
||||
else:
|
||||
check_messages(event_messages)
|
||||
|
||||
# Subclass to return name of undefined field
|
||||
class DescriptiveUndefined(StrictUndefined):
|
||||
@@ -4347,6 +4423,8 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
for event in messages:
|
||||
if not messages[event]:
|
||||
continue
|
||||
if not isinstance(messages[event], dict):
|
||||
continue
|
||||
body = messages[event].get('body', {})
|
||||
if body:
|
||||
try:
|
||||
@@ -4436,8 +4514,18 @@ class NotificationSerializer(BaseSerializer):
|
||||
'notification_type', 'recipients', 'subject', 'body')
|
||||
|
||||
def get_body(self, obj):
|
||||
if obj.notification_type == 'webhook' and 'body' in obj.body:
|
||||
return obj.body['body']
|
||||
if obj.notification_type in ('webhook', 'pagerduty'):
|
||||
if isinstance(obj.body, dict):
|
||||
if 'body' in obj.body:
|
||||
return obj.body['body']
|
||||
elif isinstance(obj.body, str):
|
||||
# attempt to load json string
|
||||
try:
|
||||
potential_body = json.loads(obj.body)
|
||||
if isinstance(potential_body, dict):
|
||||
return potential_body
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
return obj.body
|
||||
|
||||
def get_related(self, obj):
|
||||
@@ -4658,6 +4746,11 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
'Isolated groups have a designated controller group.'),
|
||||
read_only=True
|
||||
)
|
||||
is_containerized = serializers.BooleanField(
|
||||
help_text=_('Indicates whether instances in this group are containerized.'
|
||||
'Containerized groups have a designated Openshift or Kubernetes cluster.'),
|
||||
read_only=True
|
||||
)
|
||||
# NOTE: help_text is duplicated from field definitions, no obvious way of
|
||||
# both defining field details here and also getting the field's help_text
|
||||
policy_instance_percentage = serializers.IntegerField(
|
||||
@@ -4683,8 +4776,9 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
fields = ("id", "type", "url", "related", "name", "created", "modified",
|
||||
"capacity", "committed_capacity", "consumed_capacity",
|
||||
"percent_capacity_remaining", "jobs_running", "jobs_total",
|
||||
"instances", "controller", "is_controller", "is_isolated",
|
||||
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")
|
||||
"instances", "controller", "is_controller", "is_isolated", "is_containerized", "credential",
|
||||
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list",
|
||||
"pod_spec_override", "summary_fields")
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(InstanceGroupSerializer, self).get_related(obj)
|
||||
@@ -4692,6 +4786,9 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
res['instances'] = self.reverse('api:instance_group_instance_list', kwargs={'pk': obj.pk})
|
||||
if obj.controller_id:
|
||||
res['controller'] = self.reverse('api:instance_group_detail', kwargs={'pk': obj.controller_id})
|
||||
if obj.credential:
|
||||
res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential_id})
|
||||
|
||||
return res
|
||||
|
||||
def validate_policy_instance_list(self, value):
|
||||
@@ -4704,6 +4801,18 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.'))
|
||||
if self.instance and self.instance.controller_id is not None:
|
||||
raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.'))
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_policy_instance_percentage(self, value):
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_policy_instance_minimum(self, value):
|
||||
if value and self.instance and self.instance.is_containerized:
|
||||
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
|
||||
return value
|
||||
|
||||
def validate_name(self, value):
|
||||
@@ -4711,6 +4820,11 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(_('tower instance group name may not be changed.'))
|
||||
return value
|
||||
|
||||
def validate_credential(self, value):
|
||||
if value and not value.kubernetes:
|
||||
raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
|
||||
return value
|
||||
|
||||
def get_capacity_dict(self):
|
||||
# Store capacity values (globally computed) in the context
|
||||
if 'capacity_map' not in self.context:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% ifmeth GET %}
|
||||
# List Roles for a Team:
|
||||
|
||||
{% ifmeth GET %}
|
||||
Make a GET request to this resource to retrieve a list of roles associated with the selected team.
|
||||
|
||||
{% include "api/_list_common.md" %}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% ifmeth GET %}
|
||||
# List Roles for a User:
|
||||
|
||||
{% ifmeth GET %}
|
||||
Make a GET request to this resource to retrieve a list of roles associated with the selected user.
|
||||
|
||||
{% include "api/_list_common.md" %}
|
||||
|
||||
awx/api/templates/api/webhook_key_view.md (new file, 12 lines)
@@ -0,0 +1,12 @@
Webhook Secret Key:

Make a GET request to this resource to obtain the secret key for a job
template or workflow job template configured to be triggered by
webhook events. The response will include the following fields:

* `webhook_key`: Secret key that needs to be copied and added to the
webhook configuration of the service this template will be receiving
webhook events from (string, read-only)

Make an empty POST request to this resource to generate a new
replacement `webhook_key`.
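The template above documents the webhook_key endpoint that the URL changes below mount under job_templates/ and workflow_job_templates/. A hedged usage sketch; host, template id and token are placeholders, not from the diff:

import requests

AWX = 'https://awx.example.com'                 # placeholder host
HEADERS = {'Authorization': 'Bearer <token>'}   # placeholder token

# Read the secret used to sign incoming webhook payloads for job template 42.
r = requests.get(AWX + '/api/v2/job_templates/42/webhook_key/', headers=HEADERS)
print(r.json()['webhook_key'])

# An empty POST rotates the key; the view answers with HTTP 201 and the replacement.
r = requests.post(AWX + '/api/v2/job_templates/42/webhook_key/', headers=HEADERS)
print(r.status_code, r.json()['webhook_key'])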
@@ -1,7 +1,7 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
JobTemplateList,
|
||||
@@ -45,6 +45,7 @@ urls = [
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
|
||||
url(r'^(?P<pk>[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -18,6 +18,7 @@ from awx.api.views import (
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesStartedList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationNotificationTemplatesApprovalList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationObjectRolesList,
|
||||
OrganizationAccessList,
|
||||
@@ -43,6 +44,8 @@ urls = [
|
||||
name='organization_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
|
||||
name='organization_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(),
|
||||
name='organization_notification_templates_approvals_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
ApiV2RootView,
|
||||
ApiV2PingView,
|
||||
ApiV2ConfigView,
|
||||
ApiV2SubscriptionView,
|
||||
AuthView,
|
||||
UserMeList,
|
||||
DashboardView,
|
||||
@@ -94,6 +95,7 @@ v2_urls = [
|
||||
url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
|
||||
url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
|
||||
url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
|
||||
url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
|
||||
url(r'^auth/$', AuthView.as_view()),
|
||||
url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
|
||||
url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
|
||||
|
||||
awx/api/urls/webhooks.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from django.conf.urls import url

from awx.api.views import (
WebhookKeyView,
GithubWebhookReceiver,
GitlabWebhookReceiver,
)


urlpatterns = [
url(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
url(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
url(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
]
@@ -1,7 +1,7 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobTemplateList,
|
||||
@@ -16,6 +16,7 @@ from awx.api.views import (
|
||||
WorkflowJobTemplateNotificationTemplatesErrorList,
|
||||
WorkflowJobTemplateNotificationTemplatesStartedList,
|
||||
WorkflowJobTemplateNotificationTemplatesSuccessList,
|
||||
WorkflowJobTemplateNotificationTemplatesApprovalList,
|
||||
WorkflowJobTemplateAccessList,
|
||||
WorkflowJobTemplateObjectRolesList,
|
||||
WorkflowJobTemplateLabelList,
|
||||
@@ -38,9 +39,12 @@ urls = [
|
||||
name='workflow_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
|
||||
name='workflow_job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
|
||||
name='workflow_job_template_notification_templates_approvals_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -102,7 +102,7 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
|
||||
from awx.api.views.mixin import (
|
||||
ControlledByScmMixin, InstanceGroupMembershipMixin,
|
||||
OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
|
||||
UnifiedJobDeletionMixin,
|
||||
UnifiedJobDeletionMixin, NoTruncateMixin,
|
||||
)
|
||||
from awx.api.views.organization import ( # noqa
|
||||
OrganizationList,
|
||||
@@ -119,6 +119,7 @@ from awx.api.views.organization import ( # noqa
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesStartedList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationNotificationTemplatesApprovalList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationAccessList,
|
||||
OrganizationObjectRolesList,
|
||||
@@ -147,6 +148,12 @@ from awx.api.views.root import ( # noqa
|
||||
ApiV2RootView,
|
||||
ApiV2PingView,
|
||||
ApiV2ConfigView,
|
||||
ApiV2SubscriptionView,
|
||||
)
|
||||
from awx.api.views.webhooks import ( # noqa
|
||||
WebhookKeyView,
|
||||
GithubWebhookReceiver,
|
||||
GitlabWebhookReceiver,
|
||||
)
|
||||
|
||||
|
||||
@@ -245,13 +252,6 @@ class DashboardView(APIView):
|
||||
'total': hg_projects.count(),
|
||||
'failed': hg_failed_projects.count()}
|
||||
|
||||
user_jobs = get_user_queryset(request.user, models.Job)
|
||||
user_failed_jobs = user_jobs.filter(failed=True)
|
||||
data['jobs'] = {'url': reverse('api:job_list', request=request),
|
||||
'failure_url': reverse('api:job_list', request=request) + "?failed=True",
|
||||
'total': user_jobs.count(),
|
||||
'failed': user_failed_jobs.count()}
|
||||
|
||||
user_list = get_user_queryset(request.user, models.User)
|
||||
team_list = get_user_queryset(request.user, models.Team)
|
||||
credential_list = get_user_queryset(request.user, models.Credential)
|
||||
@@ -383,6 +383,13 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
|
||||
serializer_class = serializers.InstanceGroupSerializer
|
||||
permission_classes = (InstanceGroupTowerPermission,)
|
||||
|
||||
def update_raw_data(self, data):
|
||||
if self.get_object().is_containerized:
|
||||
data.pop('policy_instance_percentage', None)
|
||||
data.pop('policy_instance_minimum', None)
|
||||
data.pop('policy_instance_list', None)
|
||||
return super(InstanceGroupDetail, self).update_raw_data(data)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if instance.controller is not None:
|
||||
@@ -568,6 +575,7 @@ class TeamUsersList(BaseUsersList):
|
||||
serializer_class = serializers.UserSerializer
|
||||
parent_model = models.Team
|
||||
relationship = 'member_role.members'
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class TeamRolesList(SubListAttachDetachAPIView):
|
||||
@@ -904,6 +912,7 @@ class UserList(ListCreateAPIView):
|
||||
model = models.User
|
||||
serializer_class = serializers.UserSerializer
|
||||
permission_classes = (UserPermission,)
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class UserMeList(ListAPIView):
|
||||
@@ -911,6 +920,7 @@ class UserMeList(ListAPIView):
|
||||
model = models.User
|
||||
serializer_class = serializers.UserSerializer
|
||||
name = _('Me')
|
||||
ordering = ('username',)
|
||||
|
||||
def get_queryset(self):
|
||||
return self.model.objects.filter(pk=self.request.user.pk)
|
||||
@@ -1254,6 +1264,7 @@ class CredentialOwnerUsersList(SubListAPIView):
|
||||
serializer_class = serializers.UserSerializer
|
||||
parent_model = models.Credential
|
||||
relationship = 'admin_role.members'
|
||||
ordering = ('username',)
|
||||
|
||||
|
||||
class CredentialOwnerTeamsList(SubListAPIView):
|
||||
@@ -2136,12 +2147,21 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
|
||||
def perform_list_destroy(self, instance_list):
|
||||
inv_source = self.get_parent_object()
|
||||
with ignore_inventory_computed_fields():
|
||||
# Activity stream doesn't record disassociation here anyway
|
||||
# no signals-related reason to not bulk-delete
|
||||
models.Host.groups.through.objects.filter(
|
||||
host__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
|
||||
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
|
||||
from awx.main.signals import disable_activity_stream
|
||||
with disable_activity_stream():
|
||||
# job host summary deletion necessary to avoid deadlock
|
||||
models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
|
||||
models.Host.objects.filter(inventory_sources=inv_source).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy([])
|
||||
else:
|
||||
# Advance delete of group-host memberships to prevent deadlock
|
||||
# Activity stream doesn't record disassociation here anyway
|
||||
# no signals-related reason to not bulk-delete
|
||||
models.Host.groups.through.objects.filter(
|
||||
host__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
|
||||
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
|
||||
return r
|
||||
|
||||
@@ -2157,11 +2177,18 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
|
||||
def perform_list_destroy(self, instance_list):
|
||||
inv_source = self.get_parent_object()
|
||||
with ignore_inventory_computed_fields():
|
||||
# Same arguments for bulk delete as with host list
|
||||
models.Group.hosts.through.objects.filter(
|
||||
group__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
|
||||
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
|
||||
from awx.main.signals import disable_activity_stream
|
||||
with disable_activity_stream():
|
||||
models.Group.objects.filter(inventory_sources=inv_source).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy([])
|
||||
else:
|
||||
# Advance delete of group-host memberships to prevent deadlock
|
||||
# Same arguments for bulk delete as with host list
|
||||
models.Group.hosts.through.objects.filter(
|
||||
group__inventory_sources=inv_source
|
||||
).delete()
|
||||
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
|
||||
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
|
||||
return r
|
||||
|
||||
@@ -2568,10 +2595,34 @@ class JobTemplateSurveySpec(GenericAPIView):
return Response(dict(error=_(
"The {min_or_max} limit in survey question {idx} expected to be integer."
).format(min_or_max=key, **context)))
if qtype in ['multiplechoice', 'multiselect'] and 'choices' not in survey_item:
return Response(dict(error=_(
"Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context)
)))
# if it's a multiselect or multiple choice, it must have coices listed
# choices and defualts must come in as strings seperated by /n characters.
if qtype == 'multiselect' or qtype == 'multiplechoice':
if 'choices' in survey_item:
if isinstance(survey_item['choices'], str):
survey_item['choices'] = '\n'.join(choice for choice in survey_item['choices'].splitlines() if choice.strip() != '')
else:
return Response(dict(error=_(
"Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context)
)))
# If there is a default string split it out removing extra /n characters.
# Note: There can still be extra newline characters added in the API, these are sanitized out using .strip()
if 'default' in survey_item:
if isinstance(survey_item['default'], str):
survey_item['default'] = '\n'.join(choice for choice in survey_item['default'].splitlines() if choice.strip() != '')
list_of_defaults = survey_item['default'].splitlines()
else:
list_of_defaults = survey_item['default']
if qtype == 'multiplechoice':
# Multiplechoice types should only have 1 default.
if len(list_of_defaults) > 1:
return Response(dict(error=_(
"Multiple Choice (Single Select) can only have one default value.".format(**context)
)))
if any(item not in survey_item['choices'] for item in list_of_defaults):
return Response(dict(error=_(
"Default choice must be answered from the choices listed.".format(**context)
)))

# Process encryption substitution
if ("default" in survey_item and isinstance(survey_item['default'], str) and
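For reference, a survey item shaped to pass the new checks above. Illustrative only; the values are invented, and the field names simply follow the survey spec format this hunk already validates.

survey_item = {
    'type': 'multiselect',
    'variable': 'regions',
    'question_name': 'Deploy to which regions?',
    'required': False,
    # Choices arrive as one newline-separated string; blank lines are stripped.
    'choices': 'us-east-1\nus-west-2\neu-central-1',
    # Every default must be one of the choices; a 'multiplechoice' question
    # may carry only a single default.
    'default': 'us-east-1\neu-central-1',
}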
@@ -3117,6 +3168,17 @@ class WorkflowJobTemplateCopy(CopyAPIView):
|
||||
data.update(messages)
|
||||
return Response(data)
|
||||
|
||||
def _build_create_dict(self, obj):
|
||||
"""Special processing of fields managed by char_prompts
|
||||
"""
|
||||
r = super(WorkflowJobTemplateCopy, self)._build_create_dict(obj)
|
||||
field_names = set(f.name for f in obj._meta.get_fields())
|
||||
for field_name, ask_field_name in obj.get_ask_mapping().items():
|
||||
if field_name in r and field_name not in field_names:
|
||||
r.setdefault('char_prompts', {})
|
||||
r['char_prompts'][field_name] = r.pop(field_name)
|
||||
return r
|
||||
|
||||
@staticmethod
|
||||
def deep_copy_permission_check_func(user, new_objs):
|
||||
for obj in new_objs:
|
||||
@@ -3145,7 +3207,6 @@ class WorkflowJobTemplateLabelList(JobTemplateLabelList):
|
||||
|
||||
class WorkflowJobTemplateLaunch(RetrieveAPIView):
|
||||
|
||||
|
||||
model = models.WorkflowJobTemplate
|
||||
obj_permission_type = 'start'
|
||||
serializer_class = serializers.WorkflowJobLaunchSerializer
|
||||
@@ -3162,10 +3223,15 @@ class WorkflowJobTemplateLaunch(RetrieveAPIView):
|
||||
extra_vars.setdefault(v, u'')
|
||||
if extra_vars:
|
||||
data['extra_vars'] = extra_vars
|
||||
if obj.ask_inventory_on_launch:
|
||||
data['inventory'] = obj.inventory_id
|
||||
else:
|
||||
data.pop('inventory', None)
|
||||
modified_ask_mapping = models.WorkflowJobTemplate.get_ask_mapping()
|
||||
modified_ask_mapping.pop('extra_vars')
|
||||
for field_name, ask_field_name in obj.get_ask_mapping().items():
|
||||
if not getattr(obj, ask_field_name):
|
||||
data.pop(field_name, None)
|
||||
elif field_name == 'inventory':
|
||||
data[field_name] = getattrd(obj, "%s.%s" % (field_name, 'id'), None)
|
||||
else:
|
||||
data[field_name] = getattr(obj, field_name)
|
||||
return data
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
@@ -3279,6 +3345,11 @@ class WorkflowJobTemplateNotificationTemplatesSuccessList(WorkflowJobTemplateNot
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class WorkflowJobTemplateNotificationTemplatesApprovalList(WorkflowJobTemplateNotificationTemplatesAnyList):
|
||||
|
||||
relationship = 'notification_templates_approvals'
|
||||
|
||||
|
||||
class WorkflowJobTemplateAccessList(ResourceAccessList):
|
||||
|
||||
model = models.User # needs to be User for AccessLists's
|
||||
@@ -3364,6 +3435,11 @@ class WorkflowJobNotificationsList(SubListAPIView):
|
||||
relationship = 'notifications'
|
||||
search_fields = ('subject', 'notification_type', 'body',)
|
||||
|
||||
def get_sublist_queryset(self, parent):
|
||||
return self.model.objects.filter(Q(unifiedjob_notifications=parent) |
|
||||
Q(unifiedjob_notifications__unified_job_node__workflow_job=parent,
|
||||
unifiedjob_notifications__workflowapproval__isnull=False)).distinct()
|
||||
|
||||
|
||||
class WorkflowJobActivityStreamList(SubListAPIView):
|
||||
|
||||
@@ -3713,7 +3789,7 @@ class JobHostSummaryDetail(RetrieveAPIView):
|
||||
serializer_class = serializers.JobHostSummarySerializer
|
||||
|
||||
|
||||
class JobEventList(ListAPIView):
|
||||
class JobEventList(NoTruncateMixin, ListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
@@ -3725,8 +3801,13 @@ class JobEventDetail(RetrieveAPIView):
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
class JobEventChildrenList(SubListAPIView):
|
||||
|
||||
class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
@@ -3751,7 +3832,7 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
|
||||
name = _('Job Event Hosts List')
|
||||
|
||||
|
||||
class BaseJobEventsList(SubListAPIView):
|
||||
class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
@@ -3947,7 +4028,7 @@ class AdHocCommandRelaunch(GenericAPIView):
|
||||
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
|
||||
|
||||
class AdHocCommandEventList(ListAPIView):
|
||||
class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
|
||||
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
@@ -3959,8 +4040,13 @@ class AdHocCommandEventDetail(RetrieveAPIView):
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
class BaseAdHocCommandEventsList(SubListAPIView):
|
||||
|
||||
class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
|
||||
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
@@ -4226,8 +4312,15 @@ class NotificationTemplateTest(GenericAPIView):
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE),
|
||||
{"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)})
|
||||
msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
|
||||
if obj.notification_type in ('email', 'pagerduty'):
|
||||
body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
|
||||
elif obj.notification_type == 'webhook':
|
||||
body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
|
||||
else:
|
||||
body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}
|
||||
notification = obj.generate_notification(msg, body)
|
||||
|
||||
if not notification:
|
||||
return Response({}, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
|
||||
@@ -70,12 +70,16 @@ class InventoryUpdateEventsList(SubListAPIView):
|
||||
|
||||
class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
@@ -92,6 +96,8 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = CustomInventoryScript
|
||||
@@ -105,6 +111,8 @@ class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
class InventoryScriptCopy(CopyAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
copy_return_serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
@@ -270,3 +270,11 @@ class ControlledByScmMixin(object):
obj = super(ControlledByScmMixin, self).get_parent_object()
self._reset_inv_src_rev(obj)
return obj


class NoTruncateMixin(object):
def get_serializer_context(self):
context = super().get_serializer_context()
if self.request.query_params.get('no_truncate'):
context.update(no_truncate=True)
return context
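This mixin is what lets the job event and ad hoc command event list views changed earlier in this diff honour an explicit request for untruncated stdout. A hedged client-side sketch; host, job id and token are placeholders:

import requests

r = requests.get(
    'https://awx.example.com/api/v2/jobs/42/job_events/',   # placeholder host/id
    params={'no_truncate': 1},                               # picked up by NoTruncateMixin
    headers={'Authorization': 'Bearer <token>'},
)
for event in r.json()['results']:
    # stdout comes back in full instead of capped at EVENT_STDOUT_MAX_BYTES_DISPLAY
    print(event['stdout'])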
@@ -195,6 +195,11 @@ class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTempl
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
|
||||
|
||||
relationship = 'notification_templates_approvals'
|
||||
|
||||
|
||||
class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
|
||||
@@ -17,6 +17,8 @@ from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
import requests
|
||||
|
||||
from awx.api.generics import APIView
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.utils import (
|
||||
@@ -169,6 +171,45 @@ class ApiV2PingView(APIView):
|
||||
return Response(response)
|
||||
|
||||
|
||||
class ApiV2SubscriptionView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
name = _('Configuration')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV2SubscriptionView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
def post(self, request):
|
||||
from awx.main.utils.common import get_licenser
|
||||
data = request.data.copy()
|
||||
if data.get('rh_password') == '$encrypted$':
|
||||
data['rh_password'] = settings.REDHAT_PASSWORD
|
||||
try:
|
||||
user, pw = data.get('rh_username'), data.get('rh_password')
|
||||
validated = get_licenser().validate_rh(user, pw)
|
||||
if user:
|
||||
settings.REDHAT_USERNAME = data['rh_username']
|
||||
if pw:
|
||||
settings.REDHAT_PASSWORD = data['rh_password']
|
||||
except Exception as exc:
|
||||
msg = _("Invalid License")
|
||||
if (
|
||||
isinstance(exc, requests.exceptions.HTTPError) and
|
||||
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
|
||||
):
|
||||
msg = _("The provided credentials are invalid (HTTP 401).")
|
||||
if isinstance(exc, (ValueError, OSError)) and exc.args:
|
||||
msg = exc.args[0]
|
||||
logger.exception(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
return Response(validated)
|
||||
|
||||
|
||||
class ApiV2ConfigView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
246
awx/api/views/webhooks.py
Normal file
246
awx/api/views/webhooks.py
Normal file
@@ -0,0 +1,246 @@
|
||||
from hashlib import sha1
|
||||
import hmac
|
||||
import logging
|
||||
import urllib.parse
|
||||
|
||||
from django.utils.encoding import force_bytes
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
|
||||
from awx.api import serializers
|
||||
from awx.api.generics import APIView, GenericAPIView
|
||||
from awx.api.permissions import WebhookKeyPermission
|
||||
from awx.main.models import Job, JobTemplate, WorkflowJob, WorkflowJobTemplate
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.api.views.webhooks')
|
||||
|
||||
|
||||
class WebhookKeyView(GenericAPIView):
|
||||
serializer_class = serializers.EmptySerializer
|
||||
permission_classes = (WebhookKeyPermission,)
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {
|
||||
'job_templates': JobTemplate,
|
||||
'workflow_job_templates': WorkflowJobTemplate,
|
||||
}
|
||||
self.model = qs_models.get(self.kwargs['model_kwarg'])
|
||||
|
||||
return super().get_queryset()
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
|
||||
return Response({'webhook_key': obj.webhook_key})
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
obj.rotate_webhook_key()
|
||||
obj.save(update_fields=['webhook_key'])
|
||||
|
||||
return Response({'webhook_key': obj.webhook_key}, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class WebhookReceiverBase(APIView):
|
||||
lookup_url_kwarg = None
|
||||
lookup_field = 'pk'
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
authentication_classes = ()
|
||||
|
||||
ref_keys = {}
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {
|
||||
'job_templates': JobTemplate,
|
||||
'workflow_job_templates': WorkflowJobTemplate,
|
||||
}
|
||||
model = qs_models.get(self.kwargs['model_kwarg'])
|
||||
if model is None:
|
||||
raise PermissionDenied
|
||||
|
||||
return model.objects.filter(webhook_service=self.service).exclude(webhook_key='')
|
||||
|
||||
def get_object(self):
|
||||
queryset = self.get_queryset()
|
||||
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
|
||||
filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
|
||||
|
||||
obj = queryset.filter(**filter_kwargs).first()
|
||||
if obj is None:
|
||||
raise PermissionDenied
|
||||
|
||||
return obj
|
||||
|
||||
def get_event_type(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_guid(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_status_api(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_ref(self):
|
||||
key = self.ref_keys.get(self.get_event_type(), '')
|
||||
value = self.request.data
|
||||
for element in key.split('.'):
|
||||
try:
|
||||
if element.isdigit():
|
||||
value = value[int(element)]
|
||||
else:
|
||||
value = (value or {}).get(element)
|
||||
except Exception:
|
||||
value = None
|
||||
if value == '0000000000000000000000000000000000000000': # a deleted ref
|
||||
value = None
|
||||
return value
|
||||
|
||||
def get_signature(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def check_signature(self, obj):
|
||||
if not obj.webhook_key:
|
||||
raise PermissionDenied
|
||||
|
||||
mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
|
||||
logger.debug("header signature: %s", self.get_signature())
|
||||
logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
|
||||
if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
|
||||
raise PermissionDenied
|
||||
|
||||
@csrf_exempt
|
||||
def post(self, request, *args, **kwargs):
|
||||
# Ensure that the full contents of the request are captured for multiple uses.
|
||||
request.body
|
||||
|
||||
logger.debug(
|
||||
"headers: {}\n"
|
||||
"data: {}\n".format(request.headers, request.data)
|
||||
)
|
||||
obj = self.get_object()
|
||||
self.check_signature(obj)
|
||||
|
||||
event_type = self.get_event_type()
|
||||
event_guid = self.get_event_guid()
|
||||
event_ref = self.get_event_ref()
|
||||
status_api = self.get_event_status_api()
|
||||
|
||||
kwargs = {
|
||||
'unified_job_template_id': obj.id,
|
||||
'webhook_service': obj.webhook_service,
|
||||
'webhook_guid': event_guid,
|
||||
}
|
||||
if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
|
||||
# Short circuit if this webhook has already been received and acted upon.
|
||||
logger.debug("Webhook previously received, returning without action.")
|
||||
return Response({'message': _("Webhook previously received, aborting.")},
|
||||
status=status.HTTP_202_ACCEPTED)
|
||||
|
||||
kwargs = {
|
||||
'_eager_fields': {
|
||||
'launch_type': 'webhook',
|
||||
'webhook_service': obj.webhook_service,
|
||||
'webhook_credential': obj.webhook_credential,
|
||||
'webhook_guid': event_guid,
|
||||
},
|
||||
'extra_vars': {
|
||||
'tower_webhook_event_type': event_type,
|
||||
'tower_webhook_event_guid': event_guid,
|
||||
'tower_webhook_event_ref': event_ref,
|
||||
'tower_webhook_status_api': status_api,
|
||||
'tower_webhook_payload': request.data,
|
||||
}
|
||||
}
|
||||
|
||||
new_job = obj.create_unified_job(**kwargs)
|
||||
new_job.signal_start()
|
||||
|
||||
return Response({'message': "Job queued."}, status=status.HTTP_202_ACCEPTED)
|
||||
|
||||
|
||||
class GithubWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'github'
|
||||
|
||||
ref_keys = {
|
||||
'pull_request': 'pull_request.head.sha',
|
||||
'pull_request_review': 'pull_request.head.sha',
|
||||
'pull_request_review_comment': 'pull_request.head.sha',
|
||||
'push': 'after',
|
||||
'release': 'release.tag_name',
|
||||
'commit_comment': 'comment.commit_id',
|
||||
'create': 'ref',
|
||||
'page_build': 'build.commit',
|
||||
}
|
||||
|
||||
def get_event_type(self):
|
||||
return self.request.META.get('HTTP_X_GITHUB_EVENT')
|
||||
|
||||
def get_event_guid(self):
|
||||
return self.request.META.get('HTTP_X_GITHUB_DELIVERY')
|
||||
|
||||
def get_event_status_api(self):
|
||||
if self.get_event_type() != 'pull_request':
|
||||
return
|
||||
return self.request.data.get('pull_request', {}).get('statuses_url')

def get_signature(self):
header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
if not header_sig:
logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
raise PermissionDenied
hash_alg, signature = header_sig.split('=')
if hash_alg != 'sha1':
logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
raise PermissionDenied
return force_bytes(signature)
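For context, the base class's check_signature() recomputes this value and compares it with hmac.compare_digest(). The sketch below is not part of the diff; it only restates, from the sender's side, the scheme visible above: key is the template's webhook_key, message is the raw request body, header form is "sha1=<hexdigest>".

import hmac
from hashlib import sha1


def github_signature(webhook_key: bytes, body: bytes) -> str:
    # GitHub signs the raw payload with the shared secret and sends the result in
    # the X-Hub-Signature header; the receiver recomputes it and compares digests.
    mac = hmac.new(webhook_key, msg=body, digestmod=sha1)
    return 'sha1=' + mac.hexdigest()


print(github_signature(b'secret-from-the-webhook_key-endpoint', b'{"action": "opened"}'))
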
class GitlabWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'gitlab'
|
||||
|
||||
ref_keys = {
|
||||
'Push Hook': 'checkout_sha',
|
||||
'Tag Push Hook': 'checkout_sha',
|
||||
'Merge Request Hook': 'object_attributes.last_commit.id',
|
||||
}
|
||||
|
||||
def get_event_type(self):
|
||||
return self.request.META.get('HTTP_X_GITLAB_EVENT')
|
||||
|
||||
def get_event_guid(self):
|
||||
# GitLab does not provide a unique identifier on events, so construct one.
|
||||
h = sha1()
|
||||
h.update(force_bytes(self.request.body))
|
||||
return h.hexdigest()
|
||||
|
||||
def get_event_status_api(self):
|
||||
if self.get_event_type() != 'Merge Request Hook':
|
||||
return
|
||||
project = self.request.data.get('project', {})
|
||||
repo_url = project.get('web_url')
|
||||
if not repo_url:
|
||||
return
|
||||
parsed = urllib.parse.urlparse(repo_url)
|
||||
|
||||
return "{}://{}/api/v4/projects/{}/statuses/{}".format(
|
||||
parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
|
||||
|
||||
def get_signature(self):
|
||||
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
|
||||
|
||||
def check_signature(self, obj):
|
||||
if not obj.webhook_key:
|
||||
raise PermissionDenied
|
||||
|
||||
# GitLab only returns the secret token, not an hmac hash. Use
|
||||
# the hmac `compare_digest` helper function to prevent timing
|
||||
# analysis by attackers.
|
||||
if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
|
||||
raise PermissionDenied
|
||||
@@ -1,17 +1,18 @@
|
||||
# Python
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
import urllib.parse as urlparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.core.validators import URLValidator
|
||||
from django.core.validators import URLValidator, _lazy_re_compile
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.fields import ( # noqa
|
||||
BooleanField, CharField, ChoiceField, DictField, EmailField, IntegerField,
|
||||
ListField, NullBooleanField
|
||||
BooleanField, CharField, ChoiceField, DictField, EmailField,
|
||||
IntegerField, ListField, NullBooleanField
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.conf.fields')
|
||||
@@ -118,14 +119,42 @@ class StringListPathField(StringListField):
|
||||
|
||||
|
||||
class URLField(CharField):
|
||||
# these lines set up a custom regex that allow numbers in the
|
||||
# top-level domain
|
||||
tld_re = (
|
||||
r'\.' # dot
|
||||
r'(?!-)' # can't start with a dash
|
||||
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
|
||||
r'|xn--[a-z0-9]{1,59})' # or punycode label
|
||||
r'(?<!-)' # can't end with a dash
|
||||
r'\.?' # may have a trailing dot
|
||||
)
|
||||
|
||||
host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'
|
||||
|
||||
regex = _lazy_re_compile(
|
||||
r'^(?:[a-z0-9\.\-\+]*)://' # scheme is validated separately
|
||||
r'(?:[^\s:@/]+(?::[^\s:@/]*)?@)?' # user:pass authentication
|
||||
r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
|
||||
r'(?::\d{2,5})?' # port
|
||||
r'(?:[/?#][^\s]*)?' # resource path
|
||||
r'\Z', re.IGNORECASE)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
schemes = kwargs.pop('schemes', None)
|
||||
regex = kwargs.pop('regex', None)
|
||||
self.allow_plain_hostname = kwargs.pop('allow_plain_hostname', False)
|
||||
self.allow_numbers_in_top_level_domain = kwargs.pop('allow_numbers_in_top_level_domain', True)
|
||||
super(URLField, self).__init__(**kwargs)
|
||||
validator_kwargs = dict(message=_('Enter a valid URL'))
|
||||
if schemes is not None:
|
||||
validator_kwargs['schemes'] = schemes
|
||||
if regex is not None:
|
||||
validator_kwargs['regex'] = regex
|
||||
if self.allow_numbers_in_top_level_domain and regex is None:
|
||||
# default behavior is to allow numbers in the top level domain
|
||||
# if a custom regex isn't provided
|
||||
validator_kwargs['regex'] = URLField.regex
|
||||
self.validators.append(URLValidator(**validator_kwargs))
|
||||
|
||||
def to_representation(self, value):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from rest_framework.fields import ValidationError
|
||||
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField
|
||||
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField
|
||||
|
||||
|
||||
class TestStringListBooleanField():
|
||||
@@ -62,7 +62,7 @@ class TestListTuplesField():
|
||||
FIELD_VALUES = [
|
||||
([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
|
||||
]
|
||||
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
("abc", type("abc")),
|
||||
([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
|
||||
@@ -130,3 +130,25 @@ class TestStringListPathField():
|
||||
field.to_internal_value([value])
|
||||
assert e.value.detail[0] == "{} is not a valid path choice.".format(value)
|
||||
|
||||
|
||||
class TestURLField():
|
||||
regex = "^https://www.example.org$"
|
||||
|
||||
@pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[
|
||||
("ldap://www.example.org42", "ldap", None, True, True),
|
||||
("https://www.example.org42", "https", None, False, False),
|
||||
("https://www.example.org", None, regex, None, True),
|
||||
("https://www.example3.org", None, regex, None, False),
|
||||
("ftp://www.example.org", "https", None, None, False)
|
||||
])
|
||||
def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
|
||||
kwargs = {}
|
||||
kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)
|
||||
kwargs.setdefault("schemes", schemes)
|
||||
kwargs.setdefault("regex", regex)
|
||||
field = URLField(**kwargs)
|
||||
if expect_no_error:
|
||||
field.run_validators(url)
|
||||
else:
|
||||
with pytest.raises(ValidationError):
|
||||
field.run_validators(url)
|
||||
|
||||
@@ -317,10 +317,19 @@ class BaseAccess(object):
|
||||
validation_info['time_remaining'] = 99999999
|
||||
validation_info['grace_period_remaining'] = 99999999
|
||||
|
||||
report_violation = lambda message: logger.error(message)
|
||||
|
||||
if (
|
||||
validation_info.get('trial', False) is True or
|
||||
validation_info['instance_count'] == 10 # basic 10 license
|
||||
):
|
||||
def report_violation(message):
|
||||
raise PermissionDenied(message)
|
||||
|
||||
if check_expiration and validation_info.get('time_remaining', None) is None:
|
||||
raise PermissionDenied(_("License is missing."))
|
||||
if check_expiration and validation_info.get("grace_period_remaining") <= 0:
|
||||
raise PermissionDenied(_("License has expired."))
|
||||
elif check_expiration and validation_info.get("grace_period_remaining") <= 0:
|
||||
report_violation(_("License has expired."))
|
||||
|
||||
free_instances = validation_info.get('free_instances', 0)
|
||||
available_instances = validation_info.get('available_instances', 0)
|
||||
@@ -328,11 +337,11 @@ class BaseAccess(object):
|
||||
if add_host_name:
|
||||
host_exists = Host.objects.filter(name=add_host_name).exists()
|
||||
if not host_exists and free_instances == 0:
|
||||
raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances)
|
||||
report_violation(_("License count of %s instances has been reached.") % available_instances)
|
||||
elif not host_exists and free_instances < 0:
|
||||
raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances)
|
||||
report_violation(_("License count of %s instances has been exceeded.") % available_instances)
|
||||
elif not add_host_name and free_instances < 0:
|
||||
raise PermissionDenied(_("Host count exceeds available instances."))
|
||||
report_violation(_("Host count exceeds available instances."))
|
||||
|
||||
def check_org_host_limit(self, data, add_host_name=None):
|
||||
validation_info = get_licenser().validate()
|
||||
@@ -456,7 +465,7 @@ class BaseAccess(object):
|
||||
else:
|
||||
relationship = 'members'
|
||||
return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
|
||||
except (ParseError, ObjectDoesNotExist):
|
||||
except (ParseError, ObjectDoesNotExist, PermissionDenied):
|
||||
return False
|
||||
return False
|
||||
|
||||
@@ -652,7 +661,7 @@ class UserAccess(BaseAccess):
|
||||
if obj.is_superuser and super_users.count() == 1:
|
||||
# cannot delete the last active superuser
|
||||
return False
|
||||
if self.user.is_superuser:
|
||||
if self.can_admin(obj, None, allow_orphans=True):
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -1651,26 +1660,19 @@ class JobAccess(BaseAccess):
|
||||
except JobLaunchConfig.DoesNotExist:
|
||||
config = None
|
||||
|
||||
if obj.job_template and (self.user not in obj.job_template.execute_role):
|
||||
return False
|
||||
|
||||
# Check if JT execute access (and related prompts) is sufficient
|
||||
if obj.job_template is not None:
|
||||
if config is None:
|
||||
prompts_access = False
|
||||
elif not config.has_user_prompts(obj.job_template):
|
||||
prompts_access = True
|
||||
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
prompts_access = False
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
|
||||
else:
|
||||
prompts_access = (
|
||||
JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}) and
|
||||
not config.has_unprompted(obj.job_template)
|
||||
)
|
||||
jt_access = self.user in obj.job_template.execute_role
|
||||
if prompts_access and jt_access:
|
||||
if config and obj.job_template:
|
||||
if not config.has_user_prompts(obj.job_template):
|
||||
return True
|
||||
elif not jt_access:
|
||||
return False
|
||||
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
# never allowed, not even for org admins
|
||||
raise PermissionDenied(_('Job was launched with secret prompts provided by another user.'))
|
||||
elif not config.has_unprompted(obj.job_template):
|
||||
if JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
|
||||
return True
|
||||
|
||||
org_access = bool(obj.inventory) and self.user in obj.inventory.organization.inventory_admin_role
|
||||
project_access = obj.project is None or self.user in obj.project.admin_role
|
||||
@@ -2089,23 +2091,20 @@ class WorkflowJobAccess(BaseAccess):
|
||||
self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
|
||||
return False
|
||||
|
||||
# execute permission to WFJT is mandatory for any relaunch
|
||||
if self.user not in template.execute_role:
|
||||
return False
|
||||
|
||||
# Check if access to prompts to prevent relaunch
|
||||
if config.prompts_dict():
|
||||
if obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
|
||||
return False
|
||||
raise PermissionDenied(_("Job was launched with secret prompts provided by another user."))
|
||||
if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts you lack access to.')
|
||||
return False
|
||||
raise PermissionDenied(_('Job was launched with prompts you lack access to.'))
|
||||
if config.has_unprompted(template):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
|
||||
return False
|
||||
raise PermissionDenied(_('Job was launched with prompts no longer accepted.'))
|
||||
|
||||
# execute permission to WFJT is mandatory for any relaunch
|
||||
return (self.user in template.execute_role)
|
||||
return True # passed config checks
|
||||
|
||||
def can_recreate(self, obj):
|
||||
node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template')
|
||||
|
||||
@@ -5,10 +5,9 @@ import os
|
||||
import os.path
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import requests
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.timezone import now, timedelta
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
@@ -81,17 +80,16 @@ def gather(dest=None, module=None, collection_type='scheduled'):
|
||||
last_run = state.last_run
|
||||
logger.debug("Last analytics run was: {}".format(last_run))
|
||||
|
||||
max_interval = now() - timedelta(days=7)
|
||||
max_interval = now() - timedelta(weeks=4)
|
||||
if last_run < max_interval or not last_run:
|
||||
last_run = max_interval
|
||||
|
||||
|
||||
if _valid_license() is False:
|
||||
logger.exception("Invalid License provided, or No License Provided")
|
||||
return "Error: Invalid License provided, or No License Provided"
|
||||
|
||||
if not settings.INSIGHTS_TRACKING_STATE:
|
||||
logger.error("Insights analytics not enabled")
|
||||
|
||||
if collection_type != 'dry-run' and not settings.INSIGHTS_TRACKING_STATE:
|
||||
logger.error("Automation Analytics not enabled. Use --dry-run to gather locally without sending.")
|
||||
return
|
||||
|
||||
if module is None:
|
||||
@@ -146,30 +144,39 @@ def gather(dest=None, module=None, collection_type='scheduled'):
|
||||
|
||||
def ship(path):
|
||||
"""
|
||||
Ship gathered metrics via the Insights agent
|
||||
Ship gathered metrics to the Insights API
|
||||
"""
|
||||
if not path:
|
||||
logger.error('Automation Analytics TAR not found')
|
||||
return
|
||||
if "Error:" in str(path):
|
||||
return
|
||||
try:
|
||||
agent = 'insights-client'
|
||||
if shutil.which(agent) is None:
|
||||
logger.error('could not find {} on PATH'.format(agent))
|
||||
return
|
||||
logger.debug('shipping analytics file: {}'.format(path))
|
||||
try:
|
||||
cmd = [
|
||||
agent, '--payload', path, '--content-type', settings.INSIGHTS_AGENT_MIME
|
||||
]
|
||||
output = smart_str(subprocess.check_output(cmd, timeout=60 * 5))
|
||||
logger.debug(output)
|
||||
# reset the `last_run` when data is shipped
|
||||
run_now = now()
|
||||
state = TowerAnalyticsState.get_solo()
|
||||
state.last_run = run_now
|
||||
state.save()
|
||||
|
||||
except subprocess.CalledProcessError:
|
||||
logger.exception('{} failure:'.format(cmd))
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.exception('{} timeout:'.format(cmd))
|
||||
url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
|
||||
if not url:
|
||||
logger.error('AUTOMATION_ANALYTICS_URL is not set')
|
||||
return
|
||||
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
|
||||
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
|
||||
if not rh_user:
|
||||
return logger.error('REDHAT_USERNAME is not set')
|
||||
if not rh_password:
|
||||
return logger.error('REDHAT_PASSWORD is not set')
|
||||
with open(path, 'rb') as f:
|
||||
files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
|
||||
response = requests.post(url,
|
||||
files=files,
|
||||
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
|
||||
auth=(rh_user, rh_password),
|
||||
timeout=(31, 31))
|
||||
if response.status_code != 202:
|
||||
return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
|
||||
response.text))
|
||||
run_now = now()
|
||||
state = TowerAnalyticsState.get_solo()
|
||||
state.last_run = run_now
|
||||
state.save()
|
||||
finally:
|
||||
# cleanup tar.gz
|
||||
os.remove(path)
|
||||
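The rewritten ship() above replaces the insights-client subprocess call with a direct requests.post upload, so it now depends on three settings. A minimal pre-flight sketch, using only the setting names that appear in the function (the helper itself is illustrative, not part of the change):

    from django.conf import settings

    def analytics_upload_configured():
        # ship() logs an error and returns early if any of these are unset.
        required = ('AUTOMATION_ANALYTICS_URL', 'REDHAT_USERNAME', 'REDHAT_PASSWORD')
        return all(getattr(settings, name, None) for name in required)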
|
||||
awx/main/conf.py
@@ -2,12 +2,14 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from distutils.version import LooseVersion as Version
|
||||
|
||||
# Django
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import FloatField
|
||||
|
||||
# Tower
|
||||
from awx.conf import fields, register, register_validate
|
||||
@@ -52,15 +54,6 @@ register(
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'TOWER_ADMIN_ALERTS',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Enable Administrator Alerts'),
|
||||
help_text=_('Email Admin users for system events that may require attention.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'TOWER_URL_BASE',
|
||||
field_class=fields.URLField,
|
||||
@@ -153,7 +146,7 @@ register(
|
||||
register(
|
||||
'AUTOMATION_ANALYTICS_URL',
|
||||
field_class=fields.URLField,
|
||||
default='https://cloud.redhat.com',
|
||||
default='https://example.com',
|
||||
schemes=('http', 'https'),
|
||||
allow_plain_hostname=True, # Allow hostname only without TLD.
|
||||
label=_('Automation Analytics upload URL.'),
|
||||
@@ -298,6 +291,16 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ISOLATED_HOST_KEY_CHECKING',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Isolated host key checking'),
|
||||
help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
default=False
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ISOLATED_KEY_GENERATION',
|
||||
field_class=fields.BooleanField,
|
||||
@@ -335,6 +338,53 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Enable detailed resource profiling on all playbook runs'),
|
||||
help_text=_('If set, detailed resource profiling data will be collected on all jobs. '
|
||||
'This data can be gathered with `sosreport`.'), # noqa
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_CPU_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for cpu usage.'),
|
||||
help_text=_('Interval (in seconds) between polls for cpu usage. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_MEMORY_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for memory usage.'),
|
||||
help_text=_('Interval (in seconds) between polls for memory usage. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_PID_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for PID count.'),
|
||||
help_text=_('Interval (in seconds) between polls for PID count. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_TASK_ENV',
|
||||
field_class=fields.KeyValueField,
|
||||
@@ -350,12 +400,21 @@ register(
|
||||
'INSIGHTS_TRACKING_STATE',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Gather data for Automation Insights'),
|
||||
help_text=_('Enables Tower to gather data on automation and send it to Red Hat Insights.'),
|
||||
label=_('Gather data for Automation Analytics'),
|
||||
help_text=_('Enables Tower to gather data on automation and send it to Red Hat.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'PROJECT_UPDATE_VVV',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Run Project Updates With Higher Verbosity'),
|
||||
help_text=_('Adds the CLI -vvv flag to ansible-playbook runs of project_update.yml used for project updates.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ROLES_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
@@ -376,6 +435,85 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_URL',
|
||||
field_class=fields.URLField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server URL'),
|
||||
help_text=_(
|
||||
'For organizations that run their own Galaxy service, this gives the option to specify a '
|
||||
'host as the primary galaxy server. Requirements will be downloaded from the primary if the '
|
||||
'specific role or collection is available there. If the content is not available in the primary, '
|
||||
'or if this field is left blank, it will default to galaxy.ansible.com.'
|
||||
),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_USERNAME',
|
||||
field_class=fields.CharField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Username'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The username to use for basic authentication against the Galaxy instance, '
|
||||
'this is mutually exclusive with PRIMARY_GALAXY_TOKEN.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_PASSWORD',
|
||||
field_class=fields.CharField,
|
||||
encrypted=True,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Password'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The password to use for basic authentication against the Galaxy instance, '
|
||||
'this is mutually exclusive with PRIMARY_GALAXY_TOKEN.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_TOKEN',
|
||||
field_class=fields.CharField,
|
||||
encrypted=True,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Token'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The token to use for connecting with the Galaxy instance, '
|
||||
'this is mutually exclusive with corresponding username and password settings.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_AUTH_URL',
|
||||
field_class=fields.CharField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Authentication URL'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The token_endpoint of a Keycloak server.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PUBLIC_GALAXY_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
default=True,
|
||||
label=_('Allow Access to Public Galaxy'),
|
||||
help_text=_('Allow or deny access to the public Ansible Galaxy during project updates.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'STDOUT_MAX_BYTES_DISPLAY',
|
||||
field_class=fields.IntegerField,
|
||||
@@ -616,6 +754,16 @@ register(
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
)
|
||||
register(
|
||||
'LOG_AGGREGATOR_AUDIT',
|
||||
field_class=fields.BooleanField,
|
||||
allow_null=True,
|
||||
default=False,
|
||||
label=_('Enabled external log aggregation auditing'),
|
||||
help_text=_('When enabled, all external logs emitted by Tower will also be written to /var/log/tower/external.log. This is an experimental setting intended to be used for debugging external log aggregation issues (and may be subject to change in the future).'), # noqa
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
)
|
||||
|
||||
|
||||
register(
|
||||
@@ -646,4 +794,75 @@ def logging_validate(serializer, attrs):
|
||||
return attrs
|
||||
|
||||
|
||||
def galaxy_validate(serializer, attrs):
|
||||
"""Ansible Galaxy config options have mutual exclusivity rules, these rules
|
||||
are enforced here on serializer validation so that users will not be able
|
||||
to save settings which obviously break all project updates.
|
||||
"""
|
||||
prefix = 'PRIMARY_GALAXY_'
|
||||
|
||||
from awx.main.constants import GALAXY_SERVER_FIELDS
|
||||
if not any('{}{}'.format(prefix, subfield.upper()) in attrs for subfield in GALAXY_SERVER_FIELDS):
|
||||
return attrs
|
||||
|
||||
def _new_value(setting_name):
|
||||
if setting_name in attrs:
|
||||
return attrs[setting_name]
|
||||
elif not serializer.instance:
|
||||
return ''
|
||||
return getattr(serializer.instance, setting_name, '')
|
||||
|
||||
galaxy_data = {}
|
||||
for subfield in GALAXY_SERVER_FIELDS:
|
||||
galaxy_data[subfield] = _new_value('{}{}'.format(prefix, subfield.upper()))
|
||||
errors = {}
|
||||
if not galaxy_data['url']:
|
||||
for k, v in galaxy_data.items():
|
||||
if v:
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Cannot provide field if PRIMARY_GALAXY_URL is not set.'
|
||||
))
|
||||
for k in GALAXY_SERVER_FIELDS:
|
||||
if galaxy_data[k]:
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
if (not serializer.instance) or (not getattr(serializer.instance, setting_name, '')):
|
||||
# new auth is applied, so check if compatible with version
|
||||
from awx.main.utils import get_ansible_version
|
||||
current_version = get_ansible_version()
|
||||
min_version = '2.9'
|
||||
if Version(current_version) < Version(min_version):
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Galaxy server settings are not available until Ansible {min_version}, '
|
||||
'you are running {current_version}.'
|
||||
).format(min_version=min_version, current_version=current_version))
|
||||
if (galaxy_data['password'] or galaxy_data['username']) and (galaxy_data['token'] or galaxy_data['auth_url']):
|
||||
for k in ('password', 'username', 'token', 'auth_url'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
if setting_name in attrs:
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Setting Galaxy token and authentication URL is mutually exclusive with username and password.'
|
||||
))
|
||||
if bool(galaxy_data['username']) != bool(galaxy_data['password']):
|
||||
msg = _('If authenticating via username and password, both must be provided.')
|
||||
for k in ('username', 'password'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(msg)
|
||||
if bool(galaxy_data['token']) != bool(galaxy_data['auth_url']):
|
||||
msg = _('If authenticating via token, both token and authentication URL must be provided.')
|
||||
for k in ('token', 'auth_url'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(msg)
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
return attrs
|
||||
|
||||
|
||||
register_validate('logging', logging_validate)
|
||||
register_validate('jobs', galaxy_validate)
|
||||
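To make the rules enforced by galaxy_validate concrete, here is one illustrative combination of incoming attrs that would be rejected (values are made up):

    # Illustrative attrs that fail validation under the checks above.
    attrs = {
        'PRIMARY_GALAXY_URL': 'https://galaxy.example.org',
        'PRIMARY_GALAXY_USERNAME': 'svc-automation',  # username without a password: both-required error
        'PRIMARY_GALAXY_TOKEN': 'abc123',             # token alongside username: mutual-exclusion error
    }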
|
||||
@@ -51,3 +51,7 @@ LOGGER_BLACKLIST = (
    # loggers that may be called getting logging settings
    'awx.conf'
)

# these correspond to both AWX and Ansible settings to keep naming consistent
# for instance, settings.PRIMARY_GALAXY_AUTH_URL vs env var ANSIBLE_GALAXY_SERVER_FOO_AUTH_URL
GALAXY_SERVER_FIELDS = ('url', 'username', 'password', 'token', 'auth_url')

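The constant above pairs with the PRIMARY_GALAXY_* settings registered in awx/main/conf.py. A minimal sketch of the mapping the comment describes, assuming the naming convention it gives; the helper name and the 'primary_galaxy' server id are illustrative, not part of the diff:

    from django.conf import settings
    from awx.main.constants import GALAXY_SERVER_FIELDS

    def primary_galaxy_env(server_id='primary_galaxy'):
        # Translate PRIMARY_GALAXY_* settings into the
        # ANSIBLE_GALAXY_SERVER_<ID>_<FIELD> environment variables ansible-galaxy reads.
        env = {}
        for field in GALAXY_SERVER_FIELDS:
            value = getattr(settings, 'PRIMARY_GALAXY_{}'.format(field.upper()), '')
            if value:
                key = 'ANSIBLE_GALAXY_SERVER_{}_{}'.format(server_id.upper(), field.upper())
                env[key] = str(value)
        return env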
@@ -101,7 +101,7 @@ def aim_backend(**kwargs):
|
||||
|
||||
|
||||
aim_plugin = CredentialPlugin(
|
||||
'CyberArk AIM Secret Lookup',
|
||||
'CyberArk AIM Central Credential Provider Lookup',
|
||||
inputs=aim_inputs,
|
||||
backend=aim_backend
|
||||
)
|
||||
|
||||
@@ -103,6 +103,8 @@ def kv_backend(**kwargs):
|
||||
|
||||
sess = requests.Session()
|
||||
sess.headers['Authorization'] = 'Bearer {}'.format(token)
|
||||
# Compatibility header for older installs of HashiCorp Vault
|
||||
sess.headers['X-Vault-Token'] = token
|
||||
|
||||
if api_version == 'v2':
|
||||
if kwargs.get('secret_version'):
|
||||
@@ -158,6 +160,8 @@ def ssh_backend(**kwargs):
|
||||
|
||||
sess = requests.Session()
|
||||
sess.headers['Authorization'] = 'Bearer {}'.format(token)
|
||||
# Compatibility header for older installs of HashiCorp Vault
|
||||
sess.headers['X-Vault-Token'] = token
|
||||
# https://www.vaultproject.io/api/secret/ssh/index.html#sign-ssh-key
|
||||
request_url = '/'.join([url, secret_path, 'sign', role]).rstrip('/')
|
||||
resp = sess.post(request_url, **request_kwargs)
|
||||
|
||||
@@ -33,7 +33,11 @@ def reap(instance=None, status='failed', excluded_uuids=[]):
|
||||
'''
|
||||
Reap all jobs in waiting|running for this instance.
|
||||
'''
|
||||
me = instance or Instance.objects.me()
|
||||
me = instance
|
||||
if me is None:
|
||||
(changed, me) = Instance.objects.get_or_register()
|
||||
if changed:
|
||||
logger.info("Registered tower node '{}'".format(me.hostname))
|
||||
now = tz_now()
|
||||
workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
|
||||
jobs = UnifiedJob.objects.filter(
|
||||
|
||||
@@ -4,6 +4,7 @@ import importlib
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from kubernetes.config import kube_config
|
||||
|
||||
from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
|
||||
|
||||
@@ -107,6 +108,14 @@ class TaskWorker(BaseWorker):
|
||||
for callback in body.get('errbacks', []) or []:
|
||||
callback['uuid'] = body['uuid']
|
||||
self.perform_work(callback)
|
||||
finally:
|
||||
# It's frustrating that we have to do this, but the python k8s
|
||||
# client leaves behind cacert files in /tmp, so we must clean up
|
||||
# the tmpdir per-dispatcher process every time a new task comes in
|
||||
try:
|
||||
kube_config._cleanup_temp_files()
|
||||
except Exception:
|
||||
logger.exception('failed to cleanup k8s client tmp files')
|
||||
|
||||
for callback in body.get('callbacks', []) or []:
|
||||
callback['uuid'] = body['uuid']
|
||||
|
||||
@@ -6,12 +6,15 @@ import stat
|
||||
import tempfile
|
||||
import time
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
from django.conf import settings
|
||||
import ansible_runner
|
||||
|
||||
import awx
|
||||
from awx.main.utils import get_system_task_capacity
|
||||
from awx.main.utils import (
|
||||
get_system_task_capacity
|
||||
)
|
||||
from awx.main.queue import CallbackQueueDispatcher
|
||||
|
||||
logger = logging.getLogger('awx.isolated.manager')
|
||||
@@ -29,7 +32,7 @@ def set_pythonpath(venv_libdir, env):
|
||||
|
||||
class IsolatedManager(object):
|
||||
|
||||
def __init__(self, cancelled_callback=None, check_callback=None):
|
||||
def __init__(self, cancelled_callback=None, check_callback=None, pod_manager=None):
|
||||
"""
|
||||
:param cancelled_callback: a callable - which returns `True` or `False`
|
||||
- signifying if the job has been prematurely
|
||||
@@ -40,11 +43,36 @@ class IsolatedManager(object):
|
||||
self.idle_timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
self.started_at = None
|
||||
self.captured_command_artifact = False
|
||||
self.instance = None
|
||||
self.pod_manager = pod_manager
|
||||
|
||||
def build_inventory(self, hosts):
|
||||
if self.instance and self.instance.is_containerized:
|
||||
inventory = {'all': {'hosts': {}}}
|
||||
fd, path = tempfile.mkstemp(
|
||||
prefix='.kubeconfig', dir=self.private_data_dir
|
||||
)
|
||||
with open(path, 'wb') as temp:
|
||||
temp.write(yaml.dump(self.pod_manager.kube_config).encode())
|
||||
temp.flush()
|
||||
os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
for host in hosts:
|
||||
inventory['all']['hosts'][host] = {
|
||||
"ansible_connection": "kubectl",
|
||||
"ansible_kubectl_config": path,
|
||||
}
|
||||
else:
|
||||
inventory = '\n'.join([
|
||||
'{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
|
||||
for host in hosts
|
||||
])
|
||||
|
||||
return inventory
|
||||
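For reference, a sketch of the two shapes build_inventory() returns; the host names and kubeconfig path are illustrative:

    # Containerized instance group: a dict inventory for ansible-runner that points
    # the kubectl connection plugin at the generated kubeconfig file.
    containerized_inventory = {
        'all': {'hosts': {'awx-job-node': {
            'ansible_connection': 'kubectl',
            'ansible_kubectl_config': '/tmp/awx_123/.kubeconfig_abc',
        }}}
    }

    # Plain isolated node: an INI-style string using the configured SSH user.
    ssh_inventory = 'isolated-node-1 ansible_ssh_user=awx'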
|
||||
def build_runner_params(self, hosts, verbosity=1):
|
||||
env = dict(os.environ.items())
|
||||
env['ANSIBLE_RETRY_FILES_ENABLED'] = 'False'
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = str(settings.AWX_ISOLATED_HOST_KEY_CHECKING)
|
||||
env['ANSIBLE_LIBRARY'] = os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
|
||||
set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)
|
||||
|
||||
@@ -69,17 +97,12 @@ class IsolatedManager(object):
|
||||
else:
|
||||
playbook_logger.info(runner_obj.stdout.read())
|
||||
|
||||
inventory = '\n'.join([
|
||||
'{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
|
||||
for host in hosts
|
||||
])
|
||||
|
||||
return {
|
||||
'project_dir': os.path.abspath(os.path.join(
|
||||
os.path.dirname(awx.__file__),
|
||||
'playbooks'
|
||||
)),
|
||||
'inventory': inventory,
|
||||
'inventory': self.build_inventory(hosts),
|
||||
'envvars': env,
|
||||
'finished_callback': finished_callback,
|
||||
'verbosity': verbosity,
|
||||
@@ -128,6 +151,8 @@ class IsolatedManager(object):
|
||||
'- /artifacts/job_events/*-partial.json.tmp',
|
||||
# don't rsync the ssh_key FIFO
|
||||
'- /env/ssh_key',
|
||||
# don't rsync kube config files
|
||||
'- .kubeconfig*'
|
||||
]
|
||||
|
||||
for filename, data in (
|
||||
@@ -153,6 +178,12 @@ class IsolatedManager(object):
|
||||
runner_obj = self.run_management_playbook('run_isolated.yml',
|
||||
self.private_data_dir,
|
||||
extravars=extravars)
|
||||
|
||||
if runner_obj.status == 'failed':
|
||||
self.instance.result_traceback = runner_obj.stdout.read()
|
||||
self.instance.save(update_fields=['result_traceback'])
|
||||
return 'error', runner_obj.rc
|
||||
|
||||
return runner_obj.status, runner_obj.rc
|
||||
|
||||
def check(self, interval=None):
|
||||
@@ -175,6 +206,7 @@ class IsolatedManager(object):
|
||||
rc = None
|
||||
last_check = time.time()
|
||||
dispatcher = CallbackQueueDispatcher()
|
||||
|
||||
while status == 'failed':
|
||||
canceled = self.cancelled_callback() if self.cancelled_callback else False
|
||||
if not canceled and time.time() - last_check < interval:
|
||||
@@ -279,7 +311,6 @@ class IsolatedManager(object):
|
||||
|
||||
|
||||
def cleanup(self):
|
||||
# If the job failed for any reason, make a last-ditch effort at cleanup
|
||||
extravars = {
|
||||
'private_data_dir': self.private_data_dir,
|
||||
'cleanup_dirs': [
|
||||
@@ -393,6 +424,7 @@ class IsolatedManager(object):
|
||||
[instance.execution_node],
|
||||
verbosity=min(5, self.instance.verbosity)
|
||||
)
|
||||
|
||||
status, rc = self.dispatch(playbook, module, module_args)
|
||||
if status == 'successful':
|
||||
status, rc = self.check()
|
||||
|
||||
awx/main/management/commands/check_db.py (new file)
@@ -0,0 +1,17 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    """Checks connection to the database, and prints out connection info if not connected"""

    def handle(self, *args, **options):

        with connection.cursor() as cursor:
            cursor.execute("SELECT version()")
            version = str(cursor.fetchone()[0])

        return "Database Version: {}".format(version)
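A hedged usage sketch for the new check_db command; it can be driven like any other Django management command, and the version string shown is illustrative:

    from django.core.management import call_command

    # equivalent to `awx-manage check_db` on a node with database access
    result = call_command('check_db')
    # e.g. "Database Version: PostgreSQL 10.6 on x86_64-pc-linux-gnu ..."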
@@ -11,8 +11,10 @@ class Command(BaseCommand):
    help = 'Gather AWX analytics data'

    def add_arguments(self, parser):
        parser.add_argument('--dry-run', dest='dry-run', action='store_true',
                            help='Gather analytics without shipping. Works even if analytics are disabled in settings.')
        parser.add_argument('--ship', dest='ship', action='store_true',
                            help='Enable to ship metrics via insights-client')
                            help='Enable to ship metrics to the Red Hat Cloud')

    def init_logging(self):
        self.logger = logging.getLogger('awx.main.analytics')
@@ -23,9 +25,14 @@ class Command(BaseCommand):
        self.logger.propagate = False

    def handle(self, *args, **options):
        tgz = gather(collection_type='manual')
        self.init_logging()
        opt_ship = options.get('ship')
        opt_dry_run = options.get('dry-run')
        if opt_ship and opt_dry_run:
            self.logger.error('Both --ship and --dry-run cannot be processed at the same time.')
            return
        tgz = gather(collection_type='manual' if not opt_dry_run else 'dry-run')
        if tgz:
            self.logger.debug(tgz)
            if options.get('ship'):
            if opt_ship:
                ship(tgz)

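A brief usage sketch for the updated analytics command; the command name gather_analytics is assumed from context, while the flag semantics come from the handle() logic above:

    from django.core.management import call_command

    # equivalent to `awx-manage gather_analytics --ship`: gather, then upload the tarball
    call_command('gather_analytics', ship=True)
    # `--dry-run` gathers locally and skips the upload; handle() rejects combining it with --ship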
@@ -919,7 +919,8 @@ class Command(BaseCommand):
|
||||
new_count = Host.objects.active_count()
|
||||
if time_remaining <= 0 and not license_info.get('demo', False):
|
||||
logger.error(LICENSE_EXPIRED_MESSAGE)
|
||||
raise CommandError("License has expired!")
|
||||
if license_info.get('trial', False) is True:
|
||||
raise CommandError("License has expired!")
|
||||
# special check for tower-type inventory sources
|
||||
# but only if running the plugin
|
||||
TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
|
||||
@@ -936,7 +937,11 @@ class Command(BaseCommand):
|
||||
logger.error(DEMO_LICENSE_MESSAGE % d)
|
||||
else:
|
||||
logger.error(LICENSE_MESSAGE % d)
|
||||
raise CommandError('License count exceeded!')
|
||||
if (
|
||||
license_info.get('trial', False) is True or
|
||||
license_info['instance_count'] == 10 # basic 10 license
|
||||
):
|
||||
raise CommandError('License count exceeded!')
|
||||
|
||||
def check_org_host_limit(self):
|
||||
license_info = get_licenser().validate()
|
||||
|
||||
@@ -33,6 +33,7 @@ class Command(BaseCommand):
|
||||
]):
|
||||
ssh_key = settings.AWX_ISOLATED_PRIVATE_KEY
|
||||
env = dict(os.environ.items())
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = str(settings.AWX_ISOLATED_HOST_KEY_CHECKING)
|
||||
set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)
|
||||
res = ansible_runner.interface.run(
|
||||
private_data_dir=path,
|
||||
|
||||
@@ -221,8 +221,9 @@ class InstanceGroupManager(models.Manager):
|
||||
elif t.status == 'running':
|
||||
# Subtract capacity from all groups that contain the instance
|
||||
if t.execution_node not in instance_ig_mapping:
|
||||
logger.warning('Detected %s running inside lost instance, '
|
||||
'may still be waiting for reaper.', t.log_format)
|
||||
if not t.is_containerized:
|
||||
logger.warning('Detected %s running inside lost instance, '
|
||||
'may still be waiting for reaper.', t.log_format)
|
||||
if t.instance_group:
|
||||
impacted_groups = [t.instance_group.name]
|
||||
else:
|
||||
|
||||
awx/main/migrations/0088_v360_dashboard_optimizations.py (new file)
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-10 21:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0087_v360_update_credential_injector_help_text'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='finished',
|
||||
field=models.DateTimeField(db_index=True, default=None, editable=False, help_text='The date and time the job finished execution.', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='launch_type',
|
||||
field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='created',
|
||||
field=models.DateTimeField(db_index=True, default=None, editable=False),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0089_v360_new_job_event_types.py (new file)
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 13:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0088_v360_dashboard_optimizations'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='jobevent',
|
||||
name='event',
|
||||
field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdateevent',
|
||||
name='event',
|
||||
field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0090_v360_WFJT_prompts.py (new file)
@@ -0,0 +1,59 @@
|
||||
# Generated by Django 2.2.2 on 2019-07-23 17:56
|
||||
|
||||
import awx.main.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0089_v360_new_job_event_types'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='ask_limit_on_launch',
|
||||
field=awx.main.fields.AskForField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='ask_scm_branch_on_launch',
|
||||
field=awx.main.fields.AskForField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='char_prompts',
|
||||
field=awx.main.fields.JSONField(blank=True, default=dict),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='joblaunchconfig',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='joblaunchconfigs', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='schedule',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='schedules', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjob',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobnode',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobnodes', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplatenode',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplatenodes', to='main.Inventory'),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0091_v360_approval_node_notifications.py (new file)
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-11 13:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0090_v360_WFJT_prompts'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notification_templates_approvals',
|
||||
field=models.ManyToManyField(blank=True, related_name='organization_notification_templates_for_approvals', to='main.NotificationTemplate'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='notification_templates_approvals',
|
||||
field=models.ManyToManyField(blank=True, related_name='workflowjobtemplate_notification_templates_for_approvals', to='main.NotificationTemplate'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobnode',
|
||||
name='do_not_run',
|
||||
field=models.BooleanField(default=False, help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be ran. A value of False means the node may not run.'),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0092_v360_webhook_mixin.py (new file)
@@ -0,0 +1,49 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:49
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0091_v360_approval_node_notifications'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobtemplates', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_key',
|
||||
field=models.CharField(blank=True, help_text='Shared secret that the webhook service will use to sign requests', max_length=64),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_key',
|
||||
field=models.CharField(blank=True, help_text='Shared secret that the webhook service will use to sign requests', max_length=64),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='launch_type',
|
||||
field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('webhook', 'Webhook'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0093_v360_personal_access_tokens.py (new file)
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
from awx.main.models import CredentialType
|
||||
from awx.main.utils.common import set_current_apps
|
||||
|
||||
|
||||
def setup_tower_managed_defaults(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0092_v360_webhook_mixin'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='kind',
|
||||
field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External')], max_length=32),
|
||||
),
|
||||
migrations.RunPython(setup_tower_managed_defaults),
|
||||
]
|
||||
awx/main/migrations/0094_v360_webhook_mixin2.py (new file)
@@ -0,0 +1,44 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:52
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0093_v360_personal_access_tokens'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobs', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_guid',
|
||||
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_guid',
|
||||
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-04 00:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0094_v360_webhook_mixin2'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='instance',
|
||||
name='version',
|
||||
field=models.CharField(blank=True, max_length=120),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0096_v360_container_groups.py (new file)
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-16 23:50
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
from awx.main.models import CredentialType
|
||||
from awx.main.utils.common import set_current_apps
|
||||
|
||||
|
||||
def create_new_credential_types(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0095_v360_increase_instance_version_length'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='instancegroup',
|
||||
name='credential',
|
||||
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instancegroups', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='instancegroup',
|
||||
name='pod_spec_override',
|
||||
field=models.TextField(blank=True, default=''),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='kind',
|
||||
field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes')], max_length=32),
|
||||
),
|
||||
migrations.RunPython(create_new_credential_types)
|
||||
]
|
||||
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-11 15:40
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('main', '0096_v360_container_groups'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowapproval',
|
||||
name='approved_or_denied_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'workflowapproval', 'model_name': 'workflowapproval', 'app_label': 'main'}(class)s_approved+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-16 19:51
|
||||
|
||||
from django.db import migrations
|
||||
from awx.main.models import CredentialType
|
||||
|
||||
|
||||
def update_cyberark_aim_name(apps, schema_editor):
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
aim_types = apps.get_model('main', 'CredentialType').objects.filter(
|
||||
namespace='aim'
|
||||
).order_by('id')
|
||||
|
||||
if aim_types.count() == 2:
|
||||
original, renamed = aim_types.all()
|
||||
apps.get_model('main', 'Credential').objects.filter(
|
||||
credential_type_id=original.id
|
||||
).update(
|
||||
credential_type_id=renamed.id
|
||||
)
|
||||
original.delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0097_v360_workflowapproval_approved_or_denied_by'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_cyberark_aim_name)
|
||||
]
|
||||
@@ -150,6 +150,14 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
|
||||
def supports_isolation(cls):
|
||||
return True
|
||||
|
||||
@property
|
||||
def is_containerized(self):
|
||||
return bool(self.instance_group and self.instance_group.is_containerized)
|
||||
|
||||
@property
|
||||
def can_run_containerized(self):
|
||||
return True
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
return reverse('api:ad_hoc_command_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
|
||||
@@ -295,7 +295,10 @@ class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
r = super(PrimordialModel, self).__init__(*args, **kwargs)
|
||||
self._prior_values_store = self._get_fields_snapshot()
|
||||
if self.pk:
|
||||
self._prior_values_store = self._get_fields_snapshot()
|
||||
else:
|
||||
self._prior_values_store = {}
|
||||
return r
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
@@ -64,7 +64,7 @@ def build_safe_env(env):
|
||||
for k, v in safe_env.items():
|
||||
if k == 'AWS_ACCESS_KEY_ID':
|
||||
continue
|
||||
elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET'):
|
||||
elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET') and not k.startswith('ANSIBLE_GALAXY_SERVER'):
|
||||
continue
|
||||
elif hidden_re.search(k):
|
||||
safe_env[k] = HIDDEN_PASSWORD
|
||||
@@ -86,6 +86,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
unique_together = (('organization', 'name', 'credential_type'))
|
||||
|
||||
PASSWORD_FIELDS = ['inputs']
|
||||
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
|
||||
|
||||
credential_type = models.ForeignKey(
|
||||
'CredentialType',
|
||||
@@ -135,6 +136,10 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
def cloud(self):
|
||||
return self.credential_type.kind == 'cloud'
|
||||
|
||||
@property
|
||||
def kubernetes(self):
|
||||
return self.credential_type.kind == 'kubernetes'
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
return reverse('api:credential_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
@@ -151,7 +156,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
@property
|
||||
def has_encrypted_ssh_key_data(self):
|
||||
try:
|
||||
ssh_key_data = decrypt_field(self, 'ssh_key_data')
|
||||
ssh_key_data = self.get_input('ssh_key_data')
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
@@ -322,8 +327,10 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
('net', _('Network')),
|
||||
('scm', _('Source Control')),
|
||||
('cloud', _('Cloud')),
|
||||
('token', _('Personal Access Token')),
|
||||
('insights', _('Insights')),
|
||||
('external', _('External')),
|
||||
('kubernetes', _('Kubernetes')),
|
||||
)
|
||||
|
||||
kind = models.CharField(
|
||||
@@ -633,9 +640,6 @@ ManagedCredentialType(
|
||||
'secret': True,
|
||||
'ask_at_runtime': True
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -667,9 +671,6 @@ ManagedCredentialType(
|
||||
'type': 'string',
|
||||
'secret': True
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -738,7 +739,6 @@ ManagedCredentialType(
|
||||
'secret': True,
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
'authorize_password': ['authorize'],
|
||||
},
|
||||
'required': ['username'],
|
||||
@@ -975,6 +975,40 @@ ManagedCredentialType(
|
||||
}
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='github_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitHub Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'token',
|
||||
'label': ugettext_noop('Token'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub')
|
||||
}],
|
||||
'required': ['token'],
|
||||
},
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='gitlab_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitLab Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'token',
|
||||
'label': ugettext_noop('Token'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab')
|
||||
}],
|
||||
'required': ['token'],
|
||||
},
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='insights',
|
||||
kind='insights',
|
||||
@@ -1090,6 +1124,38 @@ ManagedCredentialType(
|
||||
)
|
||||
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='kubernetes_bearer_token',
|
||||
kind='kubernetes',
|
||||
name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'),
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'host',
|
||||
'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.')
|
||||
},{
|
||||
'id': 'bearer_token',
|
||||
'label': ugettext_noop('API authentication bearer token.'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
},{
|
||||
'id': 'verify_ssl',
|
||||
'label': ugettext_noop('Verify SSL'),
|
||||
'type': 'boolean',
|
||||
'default': True,
|
||||
},{
|
||||
'id': 'ssl_ca_cert',
|
||||
'label': ugettext_noop('Certificate Authority data'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'multiline': True,
|
||||
}],
|
||||
'required': ['host', 'bearer_token'],
|
||||
}
|
||||
)
|
||||
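This bearer-token credential type is what a container group's InstanceGroup.credential points at (see is_containerized further down in this diff). The actual kube config generation is not part of this hunk; the sketch below is only an illustration of how the inputs above could map onto a kubectl-style config, using the Credential.get_input API:

    def kube_config_from_credential(cred):
        # cred is assumed to be a Credential of the 'kubernetes_bearer_token' type defined above
        host = cred.get_input('host')
        return {
            'apiVersion': 'v1',
            'kind': 'Config',
            'clusters': [{'name': host, 'cluster': {
                'server': host,
                'insecure-skip-tls-verify': not cred.get_input('verify_ssl', default=True),
            }}],
            'users': [{'name': host, 'user': {'token': cred.get_input('bearer_token')}}],
            'contexts': [{'name': host, 'context': {'cluster': host, 'user': host}}],
            'current-context': host,
        }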
|
||||
|
||||
class CredentialInputSource(PrimordialModel):
|
||||
|
||||
class Meta:
|
||||
@@ -1097,6 +1163,8 @@ class CredentialInputSource(PrimordialModel):
|
||||
unique_together = (('target_credential', 'input_field_name'),)
|
||||
ordering = ('target_credential', 'source_credential', 'input_field_name',)
|
||||
|
||||
FIELDS_TO_PRESERVE_AT_COPY = ['source_credential', 'metadata', 'input_field_name']
|
||||
|
||||
target_credential = models.ForeignKey(
|
||||
'Credential',
|
||||
related_name='input_sources',
|
||||
|
||||
@@ -83,6 +83,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
# - runner_on*
|
||||
# - playbook_on_task_start (once for each task within a play)
|
||||
# - runner_on_failed
|
||||
# - runner_on_start
|
||||
# - runner_on_ok
|
||||
# - runner_on_error (not used for v2)
|
||||
# - runner_on_skipped
|
||||
@@ -102,6 +103,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
EVENT_TYPES = [
|
||||
# (level, event, verbose name, failed)
|
||||
(3, 'runner_on_failed', _('Host Failed'), True),
|
||||
(3, 'runner_on_start', _('Host Started'), False),
|
||||
(3, 'runner_on_ok', _('Host OK'), False),
|
||||
(3, 'runner_on_error', _('Host Failure'), True),
|
||||
(3, 'runner_on_skipped', _('Host Skipped'), False),
|
||||
@@ -322,7 +324,10 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
kwargs.pop('created', None)
|
||||
|
||||
sanitize_event_keys(kwargs, cls.VALID_KEYS)
|
||||
workflow_job_id = kwargs.pop('workflow_job_id', None)
|
||||
job_event = cls.objects.create(**kwargs)
|
||||
if workflow_job_id:
|
||||
setattr(job_event, 'workflow_job_id', workflow_job_id)
|
||||
analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=job_event)))
|
||||
return job_event
|
||||
|
||||
@@ -394,7 +399,7 @@ class JobEvent(BasePlaybookEvent):
An event/message logged from the callback when running a job.
'''

VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id']
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id', 'workflow_job_id']

class Meta:
app_label = 'main'
@@ -528,7 +533,7 @@ class JobEvent(BasePlaybookEvent):

class ProjectUpdateEvent(BasePlaybookEvent):

VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['project_update_id']
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['project_update_id', 'workflow_job_id']

class Meta:
app_label = 'main'
@@ -614,6 +619,7 @@ class BaseCommandEvent(CreatedModifiedModel):
kwargs.pop('created', None)

sanitize_event_keys(kwargs, cls.VALID_KEYS)
kwargs.pop('workflow_job_id', None)
event = cls.objects.create(**kwargs)
if isinstance(event, AdHocCommandEvent):
analytics_logger.info(
@@ -637,7 +643,7 @@ class BaseCommandEvent(CreatedModifiedModel):

class AdHocCommandEvent(BaseCommandEvent):

VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event']
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event', 'workflow_job_id']

class Meta:
app_label = 'main'
@@ -745,7 +751,7 @@ class AdHocCommandEvent(BaseCommandEvent):

class InventoryUpdateEvent(BaseCommandEvent):

VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['inventory_update_id']
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['inventory_update_id', 'workflow_job_id']

class Meta:
app_label = 'main'

@@ -18,7 +18,7 @@ from awx import __version__ as awx_application_version
from awx.api.versioning import reverse
from awx.main.managers import InstanceManager, InstanceGroupManager
from awx.main.fields import JSONField
from awx.main.models.base import BaseModel, HasEditsMixin
from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
from awx.main.models.unified_jobs import UnifiedJob
from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity
from awx.main.models.mixins import RelatedJobsMixin
@@ -59,7 +59,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
null=True,
editable=False,
)
version = models.CharField(max_length=24, blank=True)
version = models.CharField(max_length=120, blank=True)
capacity = models.PositiveIntegerField(
default=100,
editable=False,
@@ -176,6 +176,18 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
null=True,
on_delete=models.CASCADE
)
credential = models.ForeignKey(
'Credential',
related_name='%(class)ss',
blank=True,
null=True,
default=None,
on_delete=models.SET_NULL,
)
pod_spec_override = prevent_search(models.TextField(
blank=True,
default='',
))
policy_instance_percentage = models.IntegerField(
default=0,
help_text=_("Percentage of Instances to automatically assign to this group")
@@ -218,6 +230,10 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
def is_isolated(self):
return bool(self.controller)

@property
def is_containerized(self):
return bool(self.credential and self.credential.kubernetes)

'''
RelatedJobsMixin
'''
@@ -271,7 +287,8 @@ def schedule_policy_task():
@receiver(post_save, sender=InstanceGroup)
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
if created or instance.has_policy_changes():
schedule_policy_task()
if not instance.is_containerized:
schedule_policy_task()


@receiver(post_save, sender=Instance)
@@ -282,7 +299,8 @@ def on_instance_saved(sender, instance, created=False, raw=False, **kwargs):

@receiver(post_delete, sender=InstanceGroup)
def on_instance_group_deleted(sender, instance, using, **kwargs):
schedule_policy_task()
if not instance.is_containerized:
schedule_policy_task()


@receiver(post_delete, sender=Instance)

@@ -1501,7 +1501,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in InventorySourceOptions._meta.fields) | set(
['name', 'description', 'schedule', 'credentials', 'inventory']
['name', 'description', 'credentials', 'inventory']
)

def save(self, *args, **kwargs):
@@ -39,7 +39,7 @@ from awx.main.models.notifications import (
NotificationTemplate,
JobNotificationMixin,
)
from awx.main.utils import parse_yaml_or_json, getattr_dne
from awx.main.utils import parse_yaml_or_json, getattr_dne, NullablePromptPseudoField
from awx.main.fields import ImplicitRoleField, JSONField, AskForField
from awx.main.models.mixins import (
@@ -48,6 +48,8 @@ from awx.main.models.mixins import (
TaskManagerJobMixin,
CustomVirtualEnvMixin,
RelatedJobsMixin,
WebhookMixin,
WebhookTemplateMixin,
)


@@ -187,7 +189,7 @@ class JobOptions(BaseModel):
return needed


class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
'''
A job template is a reusable job definition for applying a project (with
playbook) to an inventory source with a given credential.
@@ -271,7 +273,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in JobOptions._meta.fields) | set(
['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials',
['name', 'description', 'survey_passwords', 'labels', 'credentials',
'job_slice_number', 'job_slice_count']
)

@@ -484,7 +486,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
return UnifiedJob.objects.filter(unified_job_template=self)


class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin):
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin, WebhookMixin):
'''
A job applies a project (with playbook) to an inventory source with a given
credential. It represents a single invocation of ansible-playbook with the
@@ -627,15 +629,17 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana

@property
def task_impact(self):
# NOTE: We sorta have to assume the host count matches and that forks default to 5
from awx.main.models.inventory import Host
if self.launch_type == 'callback':
count_hosts = 2
else:
count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
if self.job_slice_count > 1:
# Integer division intentional
count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
# If for some reason we can't count the hosts then lets assume the impact as forks
if self.inventory is not None:
count_hosts = self.inventory.hosts.count()
if self.job_slice_count > 1:
# Integer division intentional
count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
else:
count_hosts = 5 if self.forks == 0 else self.forks
return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

@property
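An illustrative aside, not part of the diff: the sliced host count above uses integer division so each of the job_slice_count slices is charged a near-equal share of the inventory, with earlier (1-based) slices absorbing the remainder. A minimal standalone sketch of that arithmetic with hypothetical numbers:

def sliced_host_count(count_hosts, slice_count, slice_number):
    # Mirrors the task_impact logic: integer division is intentional,
    # and slice_number is 1-based so early slices get the extra hosts.
    return (count_hosts + slice_count - slice_number) // slice_count

# 10 hosts split across 3 slices -> [4, 3, 3]
print([sliced_host_count(10, 3, n) for n in (1, 2, 3)])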
@@ -666,6 +670,14 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
def processed_hosts(self):
return self._get_hosts(job_host_summaries__processed__gt=0)

@property
def ignored_hosts(self):
return self._get_hosts(job_host_summaries__ignored__gt=0)

@property
def rescued_hosts(self):
return self._get_hosts(job_host_summaries__rescued__gt=0)

def notification_data(self, block=5):
data = super(Job, self).notification_data()
all_hosts = {}
@@ -684,7 +696,9 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
failures=h.failures,
ok=h.ok,
processed=h.processed,
skipped=h.skipped) # TODO: update with rescued, ignored (see https://github.com/ansible/awx/issues/4394)
skipped=h.skipped,
rescued=h.rescued,
ignored=h.ignored)
data.update(dict(inventory=self.inventory.name if self.inventory else None,
project=self.project.name if self.project else None,
playbook=self.playbook,
@@ -706,6 +720,14 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
return "$hidden due to Ansible no_log flag$"
return artifacts

@property
def can_run_containerized(self):
return any([ig for ig in self.preferred_instance_groups if ig.is_containerized])

@property
def is_containerized(self):
return bool(self.instance_group and self.instance_group.is_containerized)

@property
def preferred_instance_groups(self):
if self.project is not None and self.project.organization is not None:
@@ -829,25 +851,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
host.save()


# Add on aliases for the non-related-model fields
class NullablePromptPsuedoField(object):
"""
Interface for psuedo-property stored in `char_prompts` dict
Used in LaunchTimeConfig and submodels
"""
def __init__(self, field_name):
self.field_name = field_name

def __get__(self, instance, type=None):
return instance.char_prompts.get(self.field_name, None)

def __set__(self, instance, value):
if value in (None, {}):
instance.char_prompts.pop(self.field_name, None)
else:
instance.char_prompts[self.field_name] = value


class LaunchTimeConfigBase(BaseModel):
'''
Needed as separate class from LaunchTimeConfig because some models
@@ -868,6 +871,7 @@ class LaunchTimeConfigBase(BaseModel):
null=True,
default=None,
on_delete=models.SET_NULL,
help_text=_('Inventory applied as a prompt, assuming job template prompts for inventory')
)
# All standard fields are stored in this dictionary field
# This is a solution to the nullable CharField problem, specific to prompting
@@ -896,6 +900,9 @@ class LaunchTimeConfigBase(BaseModel):
data[prompt_name] = self.display_extra_vars()
else:
data[prompt_name] = self.extra_vars
# Depending on model, field type may save and return as string
if isinstance(data[prompt_name], str):
data[prompt_name] = parse_yaml_or_json(data[prompt_name])
if self.survey_passwords and not display:
data['survey_passwords'] = self.survey_passwords
else:
@@ -904,21 +911,14 @@ class LaunchTimeConfigBase(BaseModel):
data[prompt_name] = prompt_val
return data

def display_extra_vars(self):
'''
Hides fields marked as passwords in survey.
'''
if self.survey_passwords:
extra_vars = parse_yaml_or_json(self.extra_vars).copy()
for key, value in self.survey_passwords.items():
if key in extra_vars:
extra_vars[key] = value
return extra_vars
else:
return self.extra_vars

def display_extra_data(self):
return self.display_extra_vars()
for field_name in JobTemplate.get_ask_mapping().keys():
if field_name == 'extra_vars':
continue
try:
LaunchTimeConfigBase._meta.get_field(field_name)
except FieldDoesNotExist:
setattr(LaunchTimeConfigBase, field_name, NullablePromptPseudoField(field_name))


class LaunchTimeConfig(LaunchTimeConfigBase):
@@ -953,14 +953,21 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
def extra_vars(self, extra_vars):
self.extra_data = extra_vars

def display_extra_vars(self):
'''
Hides fields marked as passwords in survey.
'''
if hasattr(self, 'survey_passwords') and self.survey_passwords:
extra_vars = parse_yaml_or_json(self.extra_vars).copy()
for key, value in self.survey_passwords.items():
if key in extra_vars:
extra_vars[key] = value
return extra_vars
else:
return self.extra_vars

for field_name in JobTemplate.get_ask_mapping().keys():
if field_name == 'extra_vars':
continue
try:
LaunchTimeConfig._meta.get_field(field_name)
except FieldDoesNotExist:
setattr(LaunchTimeConfig, field_name, NullablePromptPsuedoField(field_name))
def display_extra_data(self):
return self.display_extra_vars()


class JobLaunchConfig(LaunchTimeConfig):
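An illustrative aside, not part of the diff: the NullablePromptPseudoField being swapped in above is a plain Python data descriptor that proxies attribute access into the instance's char_prompts dict. A minimal self-contained sketch of the same pattern, with hypothetical names:

class DictBackedField:
    """Descriptor that stores its value in the instance's `prompts` dict."""
    def __init__(self, field_name):
        self.field_name = field_name

    def __get__(self, instance, owner=None):
        return instance.prompts.get(self.field_name)

    def __set__(self, instance, value):
        if value in (None, {}):
            instance.prompts.pop(self.field_name, None)
        else:
            instance.prompts[self.field_name] = value

class Config:
    limit = DictBackedField('limit')
    def __init__(self):
        self.prompts = {}

cfg = Config()
cfg.limit = 'webservers'
print(cfg.prompts)   # {'limit': 'webservers'}
cfg.limit = None
print(cfg.prompts)   # {}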
@@ -1,31 +1,37 @@
# Python
import os
import json
from copy import copy, deepcopy
import json
import logging
import os

import requests

# Django
from django.apps import apps
from django.conf import settings
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User # noqa
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.query import QuerySet
from django.utils.crypto import get_random_string
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.main.models.base import prevent_search
from awx.main.models.rbac import (
Role, RoleAncestorEntry, get_roles_on_resource
)
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
from awx.main.fields import JSONField, AskForField
from awx.main.constants import ACTIVE_STATES


logger = logging.getLogger('awx.main.models.mixins')


__all__ = ['ResourceMixin', 'SurveyJobTemplateMixin', 'SurveyJobMixin',
'TaskManagerUnifiedJobMixin', 'TaskManagerJobMixin', 'TaskManagerProjectUpdateMixin',
'TaskManagerInventoryUpdateMixin', 'CustomVirtualEnvMixin']
@@ -247,7 +253,7 @@ class SurveyJobTemplateMixin(models.Model):
else:
choice_list = copy(survey_element['choices'])
if isinstance(choice_list, str):
choice_list = choice_list.split('\n')
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
for val in data[survey_element['variable']]:
if val not in choice_list:
errors.append("Value %s for '%s' expected to be one of %s." % (val, survey_element['variable'],
@@ -255,7 +261,7 @@ class SurveyJobTemplateMixin(models.Model):
elif survey_element['type'] == 'multiplechoice':
choice_list = copy(survey_element['choices'])
if isinstance(choice_list, str):
choice_list = choice_list.split('\n')
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
if survey_element['variable'] in data:
if data[survey_element['variable']] not in choice_list:
errors.append("Value %s for '%s' expected to be one of %s." % (data[survey_element['variable']],
@@ -483,3 +489,139 @@ class RelatedJobsMixin(object):
raise RuntimeError("Programmer error. Expected _get_active_jobs() to return a QuerySet.")

return [dict(id=t[0], type=mapping[t[1]]) for t in jobs.values_list('id', 'polymorphic_ctype_id')]


class WebhookTemplateMixin(models.Model):
class Meta:
abstract = True

SERVICES = [
('github', "GitHub"),
('gitlab', "GitLab"),
]

webhook_service = models.CharField(
max_length=16,
choices=SERVICES,
blank=True,
help_text=_('Service that webhook requests will be accepted from')
)
webhook_key = prevent_search(models.CharField(
max_length=64,
blank=True,
help_text=_('Shared secret that the webhook service will use to sign requests')
))
webhook_credential = models.ForeignKey(
'Credential',
blank=True,
null=True,
on_delete=models.SET_NULL,
related_name='%(class)ss',
help_text=_('Personal Access Token for posting back the status to the service API')
)

def rotate_webhook_key(self):
self.webhook_key = get_random_string(length=50)

def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields')

if not self.pk or self._values_have_edits({'webhook_service': self.webhook_service}):
if self.webhook_service:
self.rotate_webhook_key()
else:
self.webhook_key = ''

if update_fields and 'webhook_service' in update_fields:
update_fields.add('webhook_key')

super().save(*args, **kwargs)


class WebhookMixin(models.Model):
class Meta:
abstract = True

SERVICES = WebhookTemplateMixin.SERVICES

webhook_service = models.CharField(
max_length=16,
choices=SERVICES,
blank=True,
help_text=_('Service that webhook requests will be accepted from')
)
webhook_credential = models.ForeignKey(
'Credential',
blank=True,
null=True,
on_delete=models.SET_NULL,
related_name='%(class)ss',
help_text=_('Personal Access Token for posting back the status to the service API')
)
webhook_guid = models.CharField(
blank=True,
max_length=128,
help_text=_('Unique identifier of the event that triggered this webhook')
)

def update_webhook_status(self, status):
if not self.webhook_credential:
logger.debug("No credential configured to post back webhook status, skipping.")
return

status_api = self.extra_vars_dict.get('tower_webhook_status_api')
if not status_api:
logger.debug("Webhook event did not have a status API endpoint associated, skipping.")
return

service_header = {
'github': ('Authorization', 'token {}'),
'gitlab': ('PRIVATE-TOKEN', '{}'),
}
service_statuses = {
'github': {
'pending': 'pending',
'successful': 'success',
'failed': 'failure',
'canceled': 'failure', # GitHub doesn't have a 'canceled' status :(
'error': 'error',
},
'gitlab': {
'pending': 'pending',
'running': 'running',
'successful': 'success',
'failed': 'failed',
'error': 'failed', # GitLab doesn't have an 'error' status distinct from 'failed' :(
'canceled': 'canceled',
},
}

statuses = service_statuses[self.webhook_service]
if status not in statuses:
logger.debug("Skipping webhook job status change: '{}'".format(status))
return
try:
license_type = get_licenser().validate().get('license_type')
data = {
'state': statuses[status],
'context': 'ansible/awx' if license_type == 'open' else 'ansible/tower',
'target_url': self.get_ui_url(),
}
k, v = service_header[self.webhook_service]
headers = {
k: v.format(self.webhook_credential.get_input('token')),
'Content-Type': 'application/json'
}
response = requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)
except Exception:
logger.exception("Posting webhook status caused an error.")
return

if response.status_code < 400:
logger.debug("Webhook status update sent.")
else:
logger.error(
"Posting webhook status failed, code: {}\n"
"{}\n"
"Payload sent: {}".format(response.status_code, response.text, json.dumps(data))
)
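An illustrative aside, not part of the diff: update_webhook_status() above posts a commit status back to the originating service. A rough standalone sketch of the GitHub variant of that call, where status_api and token are hypothetical placeholders supplied by the webhook payload and credential:

import json
import requests

def post_github_status(status_api, token, state, target_url):
    # Mirrors the payload and headers built in update_webhook_status() above.
    headers = {'Authorization': 'token {}'.format(token),
               'Content-Type': 'application/json'}
    data = {'state': state, 'context': 'ansible/awx', 'target_url': target_url}
    return requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)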
@@ -17,7 +17,7 @@ from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

# AWX
from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
@@ -70,10 +70,10 @@ class NotificationTemplate(CommonModelNameNotUnique):
choices=NOTIFICATION_TYPE_CHOICES,
)

notification_configuration = JSONField(blank=False)
notification_configuration = prevent_search(JSONField(blank=False))

def default_messages():
return {'started': None, 'success': None, 'error': None}
return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

messages = JSONField(
null=True,
@@ -92,25 +92,6 @@ class NotificationTemplate(CommonModelNameNotUnique):
def get_message(self, condition):
return self.messages.get(condition, {})

def build_notification_message(self, event_type, context):
env = sandbox.ImmutableSandboxedEnvironment()
templates = self.get_message(event_type)
msg_template = templates.get('message', {})

try:
notification_subject = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_subject = ''

msg_body = templates.get('body', {})
try:
notification_body = env.from_string(msg_body).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_body = ''

return (notification_subject, notification_body)

def get_absolute_url(self, request=None):
return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)

@@ -128,19 +109,34 @@ class NotificationTemplate(CommonModelNameNotUnique):
old_messages = old_nt.messages
new_messages = self.messages

def merge_messages(local_old_messages, local_new_messages, local_event):
if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
local_old_event_msgs = local_old_messages[local_event]
local_new_event_msgs = local_new_messages[local_event]
for msg_type in ['message', 'body']:
if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
if old_messages is not None and new_messages is not None:
for event in ['started', 'success', 'error']:
for event in ('started', 'success', 'error', 'workflow_approval'):
if not new_messages.get(event, {}) and old_messages.get(event, {}):
new_messages[event] = old_messages[event]
continue
if new_messages.get(event, {}) and old_messages.get(event, {}):
old_event_msgs = old_messages[event]
new_event_msgs = new_messages[event]
for msg_type in ['message', 'body']:
if msg_type not in new_event_msgs and old_event_msgs.get(msg_type, None):
new_event_msgs[msg_type] = old_event_msgs[msg_type]

if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
new_messages.setdefault('workflow_approval', {})
for subevent in ('running', 'approved', 'timed_out', 'denied'):
old_wfa_messages = old_messages['workflow_approval']
new_wfa_messages = new_messages['workflow_approval']
if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
new_wfa_messages[subevent] = old_wfa_messages[subevent]
continue
if old_wfa_messages:
merge_messages(old_wfa_messages, new_wfa_messages, subevent)
else:
merge_messages(old_messages, new_messages, event)
new_messages.setdefault(event, None)


for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if self.notification_configuration[field].startswith("$encrypted$"):
@@ -169,12 +165,12 @@ class NotificationTemplate(CommonModelNameNotUnique):
def recipients(self):
return self.notification_configuration[self.notification_class.recipient_parameter]

def generate_notification(self, subject, message):
def generate_notification(self, msg, body):
notification = Notification(notification_template=self,
notification_type=self.notification_type,
recipients=smart_str(self.recipients),
subject=subject,
body=message)
subject=msg,
body=body)
notification.save()
return notification

@@ -370,7 +366,7 @@ class JobNotificationMixin(object):
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'job_summary_dict': """{'url': 'https://towerhost/$/jobs/playbook/13',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
@@ -389,14 +385,14 @@ class JobNotificationMixin(object):
return context

def context(self, serialized_job):
"""Returns a context that can be used for rendering notification messages.
Context contains whitelisted content retrieved from a serialized job object
"""Returns a dictionary that can be used for rendering notification messages.
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
and a url to the job run."""
context = {'job': {},
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_summary_dict': json.dumps(self.notification_data(), indent=4)}
'job_metadata': json.dumps(self.notification_data(), indent=4)}

def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
@@ -434,32 +430,33 @@ class JobNotificationMixin(object):
context = self.context(job_serialization)

msg_template = body_template = None
msg = body = ''

# Use custom template if available
if nt.messages:
templates = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = templates.get('message', {})
body_template = templates.get('body', {})
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)

if msg_template:
try:
notification_subject = env.from_string(msg_template).render(**context)
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_subject = ''
else:
notification_subject = u"{} #{} '{}' {}: {}".format(self.get_notification_friendly_name(),
self.id,
self.name,
status,
self.get_ui_url())
notification_body = self.notification_data()
notification_body['friendly_name'] = self.get_notification_friendly_name()
msg = ''

if body_template:
try:
notification_body['body'] = env.from_string(body_template).render(**context)
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_body['body'] = ''
body = ''

return (notification_subject, notification_body)
return (msg, body)

def send_notification_templates(self, status):
from awx.main.tasks import send_notifications # avoid circular import
@@ -475,16 +472,13 @@ class JobNotificationMixin(object):
return

for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
try:
(notification_subject, notification_body) = self.build_notification_message(nt, status)
except AttributeError:
raise NotImplementedError("build_notification_message() does not exist" % status)
(msg, body) = self.build_notification_message(nt, status)

# Use kwargs to force late-binding
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())
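An illustrative aside, not part of the diff: the send_it(local_nt=nt, ...) pattern above uses default arguments to bind the loop variables at definition time, so the callback fired later by connection.on_commit does not see only the last iteration's values. A minimal sketch of the difference:

callbacks_late = [lambda: i for i in range(3)]        # all closures share the final i
callbacks_bound = [lambda i=i: i for i in range(3)]   # default argument freezes each value

print([f() for f in callbacks_late])    # [2, 2, 2]
print([f() for f in callbacks_bound])   # [0, 1, 2]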
@@ -3,7 +3,7 @@ import re

# Django
from django.core.validators import RegexValidator
from django.db import models
from django.db import models, connection
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
@@ -121,7 +121,7 @@ class OAuth2AccessToken(AbstractAccessToken):
valid = super(OAuth2AccessToken, self).is_valid(scopes)
if valid:
self.last_used = now()
self.save(update_fields=['last_used'])
connection.on_commit(lambda: self.save(update_fields=['last_used']))
return valid

def save(self, *args, **kwargs):

@@ -51,6 +51,11 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
default=0,
help_text=_('Maximum number of hosts allowed to be managed by this organization.'),
)
notification_templates_approvals = models.ManyToManyField(
"NotificationTemplate",
blank=True,
related_name='%(class)s_notification_templates_for_approvals'
)

admin_role = ImplicitRoleField(
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,

@@ -329,7 +329,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in ProjectOptions._meta.fields) | set(
['name', 'description', 'schedule']
['name', 'description']
)

def save(self, *args, **kwargs):

@@ -119,10 +119,11 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
tzinfo = r._dtstart.tzinfo
if tzinfo is utc:
return 'UTC'
fname = tzinfo._filename
for zone in all_zones:
if fname.endswith(zone):
return zone
fname = getattr(tzinfo, '_filename', None)
if fname:
for zone in all_zones:
if fname.endswith(zone):
return zone
logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule))
return ''


@@ -42,9 +42,9 @@ from awx.main.utils import (
camelcase_to_underscore, get_model_for_type,
encrypt_dict, decrypt_field, _inventory_updates,
copy_model_by_class, copy_m2m_relationships,
get_type_for_model, parse_yaml_or_json, getattr_dne
get_type_for_model, parse_yaml_or_json, getattr_dne,
polymorphic, schedule_task_manager
)
from awx.main.utils import polymorphic, schedule_task_manager
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.consumers import emit_channel_notification
@@ -532,6 +532,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
('scheduled', _('Scheduled')), # Job was started from a schedule.
('dependency', _('Dependency')), # Job was started as a dependency of another job.
('workflow', _('Workflow')), # Job was started from a workflow job.
('webhook', _('Webhook')), # Job was started from a webhook event.
('sync', _('Sync')), # Job was started from a project sync.
('scm', _('SCM Update')) # Job was created as an Inventory SCM sync.
]
@@ -559,11 +560,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
related_name='%(class)s_unified_jobs',
on_delete=polymorphic.SET_NULL,
)
created = models.DateTimeField(
default=None,
editable=False,
db_index=True, # add an index, this is a commonly queried field
)
launch_type = models.CharField(
max_length=20,
choices=LAUNCH_TYPE_CHOICES,
default='manual',
editable=False,
db_index=True
)
schedule = models.ForeignKey( # Which schedule entry was responsible for starting this job.
'Schedule',
@@ -621,6 +628,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
default=None,
editable=False,
help_text=_("The date and time the job finished execution."),
db_index=True,
)
elapsed = models.DecimalField(
max_digits=12,
@@ -706,6 +714,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def supports_isolation(cls):
return False

@property
def can_run_containerized(self):
return False

def _get_parent_field_name(self):
return 'unified_job_template' # Override in subclasses.

@@ -1199,6 +1211,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique

def websocket_emit_status(self, status):
connection.on_commit(lambda: self._websocket_emit_status(status))
if hasattr(self, 'update_webhook_status'):
connection.on_commit(lambda: self.update_webhook_status(status))

def notification_data(self):
return dict(id=self.id,
@@ -1379,9 +1393,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique

wj = self.get_workflow_job()
if wj:
schedule = getattr_dne(wj, 'schedule')
for name in ('awx', 'tower'):
r['{}_workflow_job_id'.format(name)] = wj.pk
r['{}_workflow_job_name'.format(name)] = wj.name
if schedule:
r['{}_parent_job_schedule_id'.format(name)] = schedule.pk
r['{}_parent_job_schedule_name'.format(name)] = schedule.name

if not created_by:
schedule = getattr_dne(self, 'schedule')
@@ -1411,3 +1429,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique

def is_isolated(self):
return bool(self.controller_node)

@property
def is_containerized(self):
return False
@@ -2,15 +2,24 @@
# All Rights Reserved.

# Python
import json
import logging
from copy import copy
from urllib.parse import urljoin

# Django
from django.db import models
from django.db import connection, models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
#from django import settings as tower_settings

# Django-CRUM
from crum import get_current_user

from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

# AWX
from awx.api.versioning import reverse
from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
@@ -19,7 +28,7 @@ from awx.main.models.notifications import (
NotificationTemplate,
JobNotificationMixin
)
from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty
from awx.main.models.base import CreatedModifiedModel, VarsDictProperty
from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR
@@ -30,6 +39,8 @@ from awx.main.models.mixins import (
SurveyJobTemplateMixin,
SurveyJobMixin,
RelatedJobsMixin,
WebhookMixin,
WebhookTemplateMixin,
)
from awx.main.models.jobs import LaunchTimeConfigBase, LaunchTimeConfig, JobTemplate
from awx.main.models.credential import Credential
@@ -38,9 +49,6 @@ from awx.main.fields import JSONField
from awx.main.utils import schedule_task_manager


from copy import copy
from urllib.parse import urljoin

__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode',
'WorkflowJobTemplateNode', 'WorkflowApprovalTemplate', 'WorkflowApproval']

@@ -196,7 +204,7 @@ class WorkflowJobNode(WorkflowNodeBase):
)
do_not_run = models.BooleanField(
default=False,
help_text=_("Indidcates that a job will not be created when True. Workflow runtime "
help_text=_("Indicates that a job will not be created when True. Workflow runtime "
"semantics will mark this True if the node is in a path that will "
"decidedly not be ran. A value of False means the node may not run."),
)
@@ -207,11 +215,14 @@ class WorkflowJobNode(WorkflowNodeBase):
def prompts_dict(self, *args, **kwargs):
r = super(WorkflowJobNode, self).prompts_dict(*args, **kwargs)
# Explanation - WFJT extra_vars still break pattern, so they are not
# put through prompts processing, but inventory is only accepted
# put through prompts processing, but inventory and others are only accepted
# if JT prompts for it, so it goes through this mechanism
if self.workflow_job and self.workflow_job.inventory_id:
# workflow job inventory takes precedence
r['inventory'] = self.workflow_job.inventory
if self.workflow_job:
if self.workflow_job.inventory_id:
# workflow job inventory takes precedence
r['inventory'] = self.workflow_job.inventory
if self.workflow_job.char_prompts:
r.update(self.workflow_job.char_prompts)
return r

def get_job_kwargs(self):
@@ -298,7 +309,7 @@ class WorkflowJobNode(WorkflowNodeBase):
return data


class WorkflowJobOptions(BaseModel):
class WorkflowJobOptions(LaunchTimeConfigBase):
class Meta:
abstract = True

@@ -318,10 +329,11 @@ class WorkflowJobOptions(BaseModel):

@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in WorkflowJobOptions._meta.fields) | set(
# NOTE: if other prompts are added to WFJT, put fields in WJOptions, remove inventory
['name', 'description', 'schedule', 'survey_passwords', 'labels', 'inventory']
r = set(f.name for f in WorkflowJobOptions._meta.fields) | set(
['name', 'description', 'survey_passwords', 'labels', 'limit', 'scm_branch']
)
r.remove('char_prompts') # needed due to copying launch config to launch config
return r

def _create_workflow_nodes(self, old_node_list, user=None):
node_links = {}
@@ -355,7 +367,7 @@ class WorkflowJobOptions(BaseModel):
return new_workflow_job


class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin):
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin, WebhookTemplateMixin):

SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
FIELDS_TO_PRESERVE_AT_COPY = [
@@ -372,19 +384,24 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
on_delete=models.SET_NULL,
related_name='workflows',
)
inventory = models.ForeignKey(
'Inventory',
related_name='%(class)ss',
blank=True,
null=True,
default=None,
on_delete=models.SET_NULL,
help_text=_('Inventory applied to all job templates in workflow that prompt for inventory.'),
)
ask_inventory_on_launch = AskForField(
blank=True,
default=False,
)
ask_limit_on_launch = AskForField(
blank=True,
default=False,
)
ask_scm_branch_on_launch = AskForField(
blank=True,
default=False,
)
notification_templates_approvals = models.ManyToManyField(
"NotificationTemplate",
blank=True,
related_name='%(class)s_notification_templates_for_approvals'
)

admin_role = ImplicitRoleField(parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
'organization.workflow_admin_role'
@@ -438,9 +455,22 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
.filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
success_notification_templates = list(base_notification_templates
.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
approval_notification_templates = list(base_notification_templates
.filter(workflowjobtemplate_notification_templates_for_approvals__in=[self]))
# Get Organization NotificationTemplates
if self.organization is not None:
error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
organization_notification_templates_for_errors=self.organization)))
started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(
organization_notification_templates_for_started=self.organization)))
success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
organization_notification_templates_for_success=self.organization)))
approval_notification_templates = set(approval_notification_templates + list(base_notification_templates.filter(
organization_notification_templates_for_approvals=self.organization)))
return dict(error=list(error_notification_templates),
started=list(started_notification_templates),
success=list(success_notification_templates))
success=list(success_notification_templates),
approvals=list(approval_notification_templates))

def create_unified_job(self, **kwargs):
workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
@@ -515,7 +545,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
return WorkflowJob.objects.filter(workflow_job_template=self)


class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, LaunchTimeConfigBase):
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, WebhookMixin):
class Meta:
app_label = 'main'
ordering = ('id',)
@@ -646,7 +676,7 @@ class WorkflowApprovalTemplate(UnifiedJobTemplate):
return self.workflowjobtemplatenodes.first().workflow_job_template


class WorkflowApproval(UnifiedJob):
class WorkflowApproval(UnifiedJob, JobNotificationMixin):
class Meta:
app_label = 'main'

@@ -667,6 +697,14 @@ class WorkflowApproval(UnifiedJob):
default=False,
help_text=_("Shows when an approval node (with a timeout assigned to it) has timed out.")
)
approved_or_denied_by = models.ForeignKey(
'auth.User',
related_name='%s(class)s_approved+',
default=None,
null=True,
editable=False,
on_delete=models.SET_NULL,
)


@classmethod
@@ -680,26 +718,101 @@ class WorkflowApproval(UnifiedJob):
def event_class(self):
return None

def get_ui_url(self):
return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))

def _get_parent_field_name(self):
return 'workflow_approval_template'

def approve(self, request=None):
self.status = 'successful'
self.approved_or_denied_by = get_current_user()
self.save()
self.send_approval_notification('approved')
self.websocket_emit_status(self.status)
schedule_task_manager()
return reverse('api:workflow_approval_approve', kwargs={'pk': self.pk}, request=request)

def deny(self, request=None):
self.status = 'failed'
self.approved_or_denied_by = get_current_user()
self.save()
self.send_approval_notification('denied')
self.websocket_emit_status(self.status)
schedule_task_manager()
return reverse('api:workflow_approval_deny', kwargs={'pk': self.pk}, request=request)

def signal_start(self, **kwargs):
can_start = super(WorkflowApproval, self).signal_start(**kwargs)
self.send_approval_notification('running')
return can_start

def send_approval_notification(self, approval_status):
from awx.main.tasks import send_notifications # avoid circular import
if self.workflow_job_template is None:
return
for nt in self.workflow_job_template.notification_templates["approvals"]:
try:
(notification_subject, notification_body) = self.build_approval_notification_message(nt, approval_status)
except Exception:
raise NotImplementedError("build_approval_notification_message() does not exist")

# Use kwargs to force late-binding
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())

def build_approval_notification_message(self, nt, approval_status):
env = sandbox.ImmutableSandboxedEnvironment()

context = self.context(approval_status)

msg_template = body_template = None
msg = body = ''

# Use custom template if available
if nt.messages and nt.messages.get('workflow_approval', None):
template = nt.messages['workflow_approval'].get(approval_status, {})
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages['workflow_approval'][approval_status]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)

if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''

if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''

return (msg, body)

def context(self, approval_status):
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
return {'approval_status': approval_status,
'approval_node_name': self.workflow_approval_template.name,
'workflow_url': workflow_url,
'job_metadata': json.dumps(self.notification_data(), indent=4)}

@property
def workflow_job_template(self):
return self.unified_job_node.workflow_job.unified_job_template
try:
return self.unified_job_node.workflow_job.unified_job_template
except ObjectDoesNotExist:
return None

@property
def workflow_job(self):
@@ -1,21 +1,10 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

import json

from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
from django.utils.translation import ugettext_lazy as _


class AWXBaseEmailBackend(BaseEmailBackend):

def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
return body

awx/main/notifications/custom_notification_base.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved.


class CustomNotificationBase(object):
DEFAULT_MSG = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
DEFAULT_BODY = "{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_metadata }}"

default_messages = {"started": {"message": DEFAULT_MSG, "body": None},
"success": {"message": DEFAULT_MSG, "body": None},
"error": {"message": DEFAULT_MSG, "body": None},
"workflow_approval": {"running": {"message": 'The approval node "{{ approval_node_name }}" needs review. '
'This node can be viewed at: {{ workflow_url }}',
"body": None},
"approved": {"message": 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}',
"body": None},
"timed_out": {"message": 'The approval node "{{ approval_node_name }}" has timed out. {{ workflow_url }}',
"body": None},
"denied": {"message": 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}',
"body": None}}}
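An illustrative aside, not part of the diff: the default message strings in CustomNotificationBase are Jinja2 templates that build_notification_message() renders inside an immutable sandbox. A small sketch of that rendering step, with a made-up context:

from jinja2 import sandbox

DEFAULT_MSG = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"

env = sandbox.ImmutableSandboxedEnvironment()
context = {'job_friendly_name': 'Job',
           'job': {'id': 13, 'name': 'Demo Job Template', 'status': 'successful'},
           'url': 'https://towerhost/#/jobs/playbook/13'}
# Renders: Job #13 'Demo Job Template' successful: https://towerhost/#/jobs/playbook/13
print(env.from_string(DEFAULT_MSG).render(**context))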
@@ -1,14 +1,15 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import json
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.core.mail.backends.smtp import EmailBackend
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
|
||||
DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
|
||||
|
||||
|
||||
class CustomEmailBackend(EmailBackend):
|
||||
class CustomEmailBackend(EmailBackend, CustomNotificationBase):
|
||||
|
||||
init_parameters = {"host": {"label": "Host", "type": "string"},
|
||||
"port": {"label": "Port", "type": "int"},
|
||||
@@ -19,22 +20,17 @@ class CustomEmailBackend(EmailBackend):
|
||||
"sender": {"label": "Sender Email", "type": "string"},
|
||||
"recipients": {"label": "Recipient List", "type": "list"},
|
||||
"timeout": {"label": "Timeout", "type": "int", "default": 30}}
|
||||
|
||||
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
|
||||
DEFAULT_BODY = smart_text(_("{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_summary_dict }}"))
|
||||
default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
|
||||
"success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
|
||||
"error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
|
||||
recipient_parameter = "recipients"
|
||||
sender_parameter = "sender"
|
||||
|
||||
default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"approved": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
|
||||
"denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}
|
||||
|
||||
def format_body(self, body):
|
||||
if "body" in body:
|
||||
body_actual = body['body']
|
||||
else:
|
||||
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
|
||||
body['friendly_name'], body['id'], body['status'], body['url'])
|
||||
)
|
||||
body_actual += json.dumps(body, indent=4)
|
||||
return body_actual
|
||||
# leave body unchanged (expect a string)
|
||||
return body
|
||||
|
||||
@@ -8,24 +8,21 @@ import dateutil.parser as dp
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.grafana_backend')
|
||||
|
||||
|
||||
class GrafanaBackend(AWXBaseEmailBackend):
|
||||
class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
|
||||
init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
|
||||
"grafana_key": {"label": "Grafana API Key", "type": "password"}}
|
||||
recipient_parameter = "grafana_url"
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
|
||||
default_messages = {"started": {"message": DEFAULT_SUBJECT},
|
||||
"success": {"message": DEFAULT_SUBJECT},
|
||||
"error": {"message": DEFAULT_SUBJECT}}
|
||||
|
||||
def __init__(self, grafana_key,dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
|
||||
fail_silently=False, **kwargs):
|
||||
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
|
||||
|
||||
@@ -7,12 +7,14 @@ import requests
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.hipchat_backend')
|
||||
|
||||
|
||||
class HipChatBackend(AWXBaseEmailBackend):
|
||||
class HipChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
|
||||
init_parameters = {"token": {"label": "Token", "type": "password"},
|
||||
"rooms": {"label": "Destination Rooms", "type": "list"},
|
||||
@@ -23,11 +25,6 @@ class HipChatBackend(AWXBaseEmailBackend):
|
||||
recipient_parameter = "rooms"
|
||||
sender_parameter = "message_from"
|
||||
|
||||
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
|
||||
default_messages = {"started": {"message": DEFAULT_SUBJECT},
|
||||
"success": {"message": DEFAULT_SUBJECT},
|
||||
"error": {"message": DEFAULT_SUBJECT}}
|
||||
|
||||
def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
|
||||
super(HipChatBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.token = token
|
||||
|
||||
@@ -9,12 +9,14 @@ import irc.client
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.irc_backend')
|
||||
|
||||
|
||||
class IrcBackend(AWXBaseEmailBackend):
|
||||
class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
|
||||
init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
|
||||
"port": {"label": "IRC Server Port", "type": "int"},
|
||||
@@ -25,11 +27,6 @@ class IrcBackend(AWXBaseEmailBackend):
|
||||
recipient_parameter = "targets"
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
|
||||
default_messages = {"started": {"message": DEFAULT_SUBJECT},
|
||||
"success": {"message": DEFAULT_SUBJECT},
|
||||
"error": {"message": DEFAULT_SUBJECT}}
|
||||
|
||||
def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
|
||||
super(IrcBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.server = server
|
||||
|
||||
@@ -7,23 +7,20 @@ import json

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.mattermost_backend')


class MattermostBackend(AWXBaseEmailBackend):
class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"},
"mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "mattermost_url"
sender_parameter = None

DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}

def __init__(self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None,
mattermost_icon_url=None, fail_silently=False, **kwargs):
super(MattermostBackend, self).__init__(fail_silently=fail_silently)
@@ -1,17 +1,23 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

import json
import logging
import pygerduty

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG

logger = logging.getLogger('awx.main.notifications.pagerduty_backend')


class PagerDutyBackend(AWXBaseEmailBackend):
class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
"token": {"label": "API Token", "type": "password"},

@@ -20,11 +26,14 @@ class PagerDutyBackend(AWXBaseEmailBackend):
recipient_parameter = "service_key"
sender_parameter = "client_name"

DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
DEFAULT_BODY = "{{ job_summary_dict }}"
default_messages = {"started": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"success": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"error": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"approved": {"message": DEFAULT_MSG,"body": DEFAULT_BODY},
"timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}

def __init__(self, subdomain, token, fail_silently=False, **kwargs):
super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)

@@ -32,6 +41,16 @@ class PagerDutyBackend(AWXBaseEmailBackend):
self.token = token

def format_body(self, body):
# cast to dict if possible # TODO: is it true that this can be a dict or str?
try:
potential_body = json.loads(body)
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
pass

# but it's okay if this is also just a string

return body

def send_messages(self, messages):
@@ -7,22 +7,20 @@ import json

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.rocketchat_backend')


class RocketChatBackend(AWXBaseEmailBackend):
class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"},
"rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "rocketchat_url"
sender_parameter = None

DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}

def __init__(self, rocketchat_no_verify_ssl=False, rocketchat_username=None, rocketchat_icon_url=None, fail_silently=False, **kwargs):
super(RocketChatBackend, self).__init__(fail_silently=fail_silently)
@@ -6,24 +6,21 @@ from slackclient import SlackClient

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.slack_backend')
WEBSOCKET_TIMEOUT = 30


class SlackBackend(AWXBaseEmailBackend):
class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"token": {"label": "Token", "type": "password"},
"channels": {"label": "Destination Channels", "type": "list"}}
recipient_parameter = "channels"
sender_parameter = None

DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}

def __init__(self, token, hex_color="", fail_silently=False, **kwargs):
super(SlackBackend, self).__init__(fail_silently=fail_silently)
self.token = token

@@ -50,6 +47,7 @@ class SlackBackend(AWXBaseEmailBackend):
else:
ret = connection.api_call("chat.postMessage",
channel=r,
as_user=True,
text=m.subject)
logger.debug(ret)
if ret['ok']:
@@ -7,12 +7,14 @@ from twilio.rest import Client

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.twilio_backend')


class TwilioBackend(AWXBaseEmailBackend):
class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
"account_token": {"label": "Account Token", "type": "password"},

@@ -21,11 +23,6 @@ class TwilioBackend(AWXBaseEmailBackend):
recipient_parameter = "to_numbers"
sender_parameter = "from_number"

DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}

def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
super(TwilioBackend, self).__init__(fail_silently=fail_silently)
self.account_sid = account_sid
@@ -7,13 +7,15 @@ import requests

from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.utils import get_awx_version
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.webhook_backend')


class WebhookBackend(AWXBaseEmailBackend):
class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):

init_parameters = {"url": {"label": "Target URL", "type": "string"},
"http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},

@@ -24,10 +26,16 @@ class WebhookBackend(AWXBaseEmailBackend):
recipient_parameter = "url"
sender_parameter = None

DEFAULT_BODY = "{{ job_summary_dict }}"
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {"started": {"body": DEFAULT_BODY},
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY}}
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. '
'This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'}}}

def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
self.http_method = http_method

@@ -38,15 +46,13 @@ class WebhookBackend(AWXBaseEmailBackend):
super(WebhookBackend, self).__init__(fail_silently=fail_silently)

def format_body(self, body):
# If `body` has body field, attempt to use this as the main body,
# otherwise, leave it as a sub-field
if isinstance(body, dict) and 'body' in body and isinstance(body['body'], str):
try:
potential_body = json.loads(body['body'])
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
pass
# expect body to be a string representing a dict
try:
potential_body = json.loads(body)
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
body = {}
return body

def send_messages(self, messages):
@@ -1,6 +1,8 @@
import re
import urllib.parse as urlparse

from django.conf import settings

REPLACE_STR = '$encrypted$'


@@ -10,14 +12,24 @@ class UriCleaner(object):

@staticmethod
def remove_sensitive(cleartext):
# exclude_list contains the items that will _not_ be redacted
exclude_list = [settings.PUBLIC_GALAXY_SERVER['url']]
if settings.PRIMARY_GALAXY_URL:
exclude_list += [settings.PRIMARY_GALAXY_URL]
if settings.FALLBACK_GALAXY_SERVERS:
exclude_list += [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
redactedtext = cleartext
text_index = 0
while True:
match = UriCleaner.SENSITIVE_URI_PATTERN.search(redactedtext, text_index)
if not match:
break
uri_str = match.group(1)
# Do not redact items from the exclude list
if any(uri_str.startswith(exclude_uri) for exclude_uri in exclude_list):
text_index = match.start() + len(uri_str)
continue
try:
uri_str = match.group(1)
# May raise a ValueError if invalid URI for one reason or another
o = urlparse.urlsplit(uri_str)
@@ -1,4 +1,6 @@
# Copyright (c) 2017 Ansible, Inc.
#

from awx.main.scheduler.task_manager import TaskManager # noqa
from .task_manager import TaskManager

__all__ = ['TaskManager']
awx/main/scheduler/kubernetes.py (new file, 182 lines)
@@ -0,0 +1,182 @@
import collections
import time
import logging
from base64 import b64encode

from django.conf import settings
from kubernetes import client, config
from django.utils.functional import cached_property

from awx.main.utils.common import parse_yaml_or_json

logger = logging.getLogger('awx.main.scheduler')


class PodManager(object):

def __init__(self, task=None):
self.task = task

def deploy(self):
if not self.credential.kubernetes:
raise RuntimeError('Pod deployment cannot occur without a Kubernetes credential')

self.kube_api.create_namespaced_pod(body=self.pod_definition,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)

num_retries = settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES
for retry_attempt in range(num_retries - 1):
logger.debug(f"Checking for pod {self.pod_name}. Attempt {retry_attempt + 1} of {num_retries}")
pod = self.kube_api.read_namespaced_pod(name=self.pod_name,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
if pod.status.phase != 'Pending':
break
else:
logger.debug(f"Pod {self.pod_name} is Pending.")
time.sleep(settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY)
continue

if pod.status.phase == 'Running':
logger.debug(f"Pod {self.pod_name} is online.")
return pod
else:
logger.warn(f"Pod {self.pod_name} did not start. Status is {pod.status.phase}.")

@classmethod
def list_active_jobs(self, instance_group):
task = collections.namedtuple('Task', 'id instance_group')(
id='',
instance_group=instance_group
)
pm = PodManager(task)
try:
for pod in pm.kube_api.list_namespaced_pod(
pm.namespace,
label_selector='ansible-awx={}'.format(settings.INSTALL_UUID)
).to_dict().get('items', []):
job = pod['metadata'].get('labels', {}).get('ansible-awx-job-id')
if job:
try:
yield int(job)
except ValueError:
pass
except Exception:
logger.exception('Failed to list pods for container group {}'.format(instance_group))

def delete(self):
return self.kube_api.delete_namespaced_pod(name=self.pod_name,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)

@property
def namespace(self):
return self.pod_definition['metadata']['namespace']

@property
def credential(self):
return self.task.instance_group.credential

@cached_property
def kube_config(self):
return generate_tmp_kube_config(self.credential, self.namespace)

@cached_property
def kube_api(self):
# this feels a little janky, but it's what k8s' own code does
# internally when it reads kube config files from disk:
# https://github.com/kubernetes-client/python-base/blob/0b208334ef0247aad9afcaae8003954423b61a0d/config/kube_config.py#L643
loader = config.kube_config.KubeConfigLoader(
config_dict=self.kube_config
)
cfg = type.__call__(client.Configuration)
loader.load_and_set(cfg)
return client.CoreV1Api(api_client=client.ApiClient(
configuration=cfg
))

@property
def pod_name(self):
return f"awx-job-{self.task.id}"

@property
def pod_definition(self):
default_pod_spec = {
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE
},
"spec": {
"containers": [{
"image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE,
"tty": True,
"stdin": True,
"imagePullPolicy": "Always",
"args": [
'sleep', 'infinity'
]
}]
}
}

pod_spec_override = {}
if self.task and self.task.instance_group.pod_spec_override:
pod_spec_override = parse_yaml_or_json(
self.task.instance_group.pod_spec_override)
pod_spec = {**default_pod_spec, **pod_spec_override}

if self.task:
pod_spec['metadata']['name'] = self.pod_name
pod_spec['metadata']['labels'] = {
'ansible-awx': settings.INSTALL_UUID,
'ansible-awx-job-id': str(self.task.id)
}
pod_spec['spec']['containers'][0]['name'] = self.pod_name

return pod_spec


def generate_tmp_kube_config(credential, namespace):
host_input = credential.get_input('host')
config = {
"apiVersion": "v1",
"kind": "Config",
"preferences": {},
"clusters": [
{
"name": host_input,
"cluster": {
"server": host_input
}
}
],
"users": [
{
"name": host_input,
"user": {
"token": credential.get_input('bearer_token')
}
}
],
"contexts": [
{
"name": host_input,
"context": {
"cluster": host_input,
"user": host_input,
"namespace": namespace
}
}
],
"current-context": host_input
}

if credential.get_input('verify_ssl'):
config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(
credential.get_input('ssl_ca_cert').encode() # encode to bytes
).decode() # decode the base64 data into a str
else:
config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
return config
@@ -251,6 +251,26 @@ class TaskManager():
task.controller_node = controller_node
logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
task.log_format, task.execution_node, controller_node))
elif rampart_group.is_containerized:
# find one real, non-containerized instance with capacity to
# act as the controller for k8s API interaction
match = None
for group in InstanceGroup.objects.all():
if group.is_containerized or group.controller_id:
continue
match = group.find_largest_idle_instance()
if match:
break
task.instance_group = rampart_group
if task.supports_isolation():
task.controller_node = match.hostname
else:
# project updates and inventory updates don't *actually* run in pods,
# so just pick *any* non-isolated, non-containerized host and use it
# as the execution node
task.execution_node = match.hostname
logger.debug('Submitting containerized {} to queue {}.'.format(
task.log_format, task.execution_node))
else:
task.instance_group = rampart_group
if instance is not None:

@@ -447,7 +467,7 @@ class TaskManager():
for rampart_group in preferred_instance_groups:
if idle_instance_that_fits is None:
idle_instance_that_fits = rampart_group.find_largest_idle_instance()
if self.get_remaining_capacity(rampart_group.name) <= 0:
if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
logger.debug("Skipping group {} capacity <= 0".format(rampart_group.name))
continue

@@ -456,10 +476,11 @@ class TaskManager():
logger.debug("Starting dependent {} in group {} instance {}".format(
task.log_format, rampart_group.name, execution_instance.hostname))
elif not execution_instance and idle_instance_that_fits:
execution_instance = idle_instance_that_fits
logger.debug("Starting dependent {} in group {} on idle instance {}".format(
task.log_format, rampart_group.name, execution_instance.hostname))
if execution_instance:
if not rampart_group.is_containerized:
execution_instance = idle_instance_that_fits
logger.debug("Starting dependent {} in group {} on idle instance {}".format(
task.log_format, rampart_group.name, execution_instance.hostname))
if execution_instance or rampart_group.is_containerized:
self.graph[rampart_group.name]['graph'].add_job(task)
tasks_to_fail = [t for t in dependency_tasks if t != task]
tasks_to_fail += [dependent_task]

@@ -492,10 +513,16 @@ class TaskManager():
self.start_task(task, None, task.get_jobs_fail_chain(), None)
continue
for rampart_group in preferred_instance_groups:
if task.can_run_containerized and rampart_group.is_containerized:
self.graph[rampart_group.name]['graph'].add_job(task)
self.start_task(task, rampart_group, task.get_jobs_fail_chain(), None)
found_acceptable_queue = True
break

if idle_instance_that_fits is None:
idle_instance_that_fits = rampart_group.find_largest_idle_instance()
remaining_capacity = self.get_remaining_capacity(rampart_group.name)
if remaining_capacity <= 0:
if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
rampart_group.name, remaining_capacity))
continue

@@ -505,10 +532,11 @@ class TaskManager():
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
elif not execution_instance and idle_instance_that_fits:
execution_instance = idle_instance_that_fits
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
if execution_instance:
if not rampart_group.is_containerized:
execution_instance = idle_instance_that_fits
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
if execution_instance or rampart_group.is_containerized:
self.graph[rampart_group.name]['graph'].add_job(task)
self.start_task(task, rampart_group, task.get_jobs_fail_chain(), execution_instance)
found_acceptable_queue = True

@@ -533,6 +561,7 @@ class TaskManager():
logger.warn(timeout_message)
task.timed_out = True
task.status = 'failed'
task.send_approval_notification('timed_out')
task.websocket_emit_status(task.status)
task.job_explanation = timeout_message
task.save(update_fields=['status', 'job_explanation', 'timed_out'])
@@ -684,16 +684,18 @@ def save_user_session_membership(sender, **kwargs):
return
if UserSessionMembership.objects.filter(user=user_id, session=session).exists():
return
UserSessionMembership(user_id=user_id, session=session, created=timezone.now()).save()
expired = UserSessionMembership.get_memberships_over_limit(user_id)
for membership in expired:
Session.objects.filter(session_key__in=[membership.session_id]).delete()
membership.delete()
if len(expired):
consumers.emit_channel_notification(
'control-limit_reached_{}'.format(user_id),
dict(group_name='control', reason='limit_reached')
)
# check if user_id from session has an id match in User before saving
if User.objects.filter(id=int(user_id)).exists():
UserSessionMembership(user_id=user_id, session=session, created=timezone.now()).save()
expired = UserSessionMembership.get_memberships_over_limit(user_id)
for membership in expired:
Session.objects.filter(session_key__in=[membership.session_id]).delete()
membership.delete()
if len(expired):
consumers.emit_channel_notification(
'control-limit_reached_{}'.format(user_id),
dict(group_name='control', reason='limit_reached')
)


@receiver(post_save, sender=OAuth2AccessToken)
@@ -22,10 +22,6 @@ import yaml
import fcntl
from pathlib import Path
from uuid import uuid4
try:
import psutil
except Exception:
psutil = None
import urllib.parse as urlparse

# Django

@@ -34,12 +30,14 @@ from django.db import transaction, DatabaseError, IntegrityError
from django.db.models.fields.related import ForeignKey
from django.utils.timezone import now, timedelta
from django.utils.encoding import smart_str
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist

# Kubernetes
from kubernetes.client.rest import ApiException

# Django-CRUM
from crum import impersonate

@@ -52,7 +50,7 @@ import ansible_runner

# AWX
from awx import __version__ as awx_application_version
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV, GALAXY_SERVER_FIELDS
from awx.main.access import access_registry
from awx.main.models import (
Schedule, TowerScheduleState, Instance, InstanceGroup,

@@ -69,10 +67,10 @@ from awx.main.isolated import manager as isolated_manager
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.utils import (get_ssh_version, update_scm_url,
get_licenser,
ignore_inventory_computed_fields,
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
get_awx_version)
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services

@@ -88,7 +86,7 @@ from rest_framework.exceptions import PermissionDenied
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies',
'update_inventory_computed_fields', 'update_host_smart_inventory_memberships',
'send_notifications', 'run_administrative_checks', 'purge_old_stdout_files']
'send_notifications', 'purge_old_stdout_files']

HIDDEN_PASSWORD = '**********'
@@ -251,6 +249,9 @@ def apply_cluster_membership_policies():
# On a differential basis, apply instances to non-isolated groups
with transaction.atomic():
for g in actual_groups:
if g.obj.is_containerized:
logger.debug('Skipping containerized group {} for policy calculation'.format(g.obj.name))
continue
instances_to_add = set(g.instances) - set(g.prior_instances)
instances_to_remove = set(g.prior_instances) - set(g.instances)
if instances_to_add:

@@ -323,7 +324,7 @@ def send_notifications(notification_list, job_id=None):
notification.status = "successful"
notification.notifications_sent = sent
except Exception as e:
logger.error("Send Notification Failed {}".format(e))
logger.exception("Send Notification Failed {}".format(e))
notification.status = "failed"
notification.error = smart_str(e)
update_fields.append('error')

@@ -349,28 +350,6 @@ def gather_analytics():
os.remove(tgz)


@task()
def run_administrative_checks():
logger.warn("Running administrative checks.")
if not settings.TOWER_ADMIN_ALERTS:
return
validation_info = get_licenser().validate()
if validation_info['license_type'] != 'open' and validation_info.get('instance_count', 0) < 1:
return
used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100))
tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
if (used_percentage * 100) > 90:
send_mail("Ansible Tower host usage over 90%",
_("Ansible Tower host usage over 90%"),
tower_admin_emails,
fail_silently=True)
if validation_info.get('date_warning', False):
send_mail("Ansible Tower license will expire soon",
_("Ansible Tower license will expire soon"),
tower_admin_emails,
fail_silently=True)


@task(queue=get_local_queuename)
def purge_old_stdout_files():
nowtime = time.time()

@@ -451,6 +430,25 @@ def cluster_node_heartbeat():
logger.exception('Error marking {} as lost'.format(other_inst.hostname))


@task(queue=get_local_queuename)
def awx_k8s_reaper():
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
for group in InstanceGroup.objects.filter(credential__isnull=False).iterator():
if group.is_containerized:
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
for job in UnifiedJob.objects.filter(
pk__in=list(PodManager.list_active_jobs(group))
).exclude(status__in=ACTIVE_STATES):
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
try:
PodManager(job).delete()
except Exception:
logger.exception("Failed to delete orphaned pod {} from {}".format(
job.log_format, group
))


@task(queue=get_local_queuename)
def awx_isolated_heartbeat():
local_hostname = settings.CLUSTER_HOST_ID
@@ -704,6 +702,7 @@ class BaseTask(object):

def __init__(self):
self.cleanup_paths = []
self.parent_workflow_job_id = None

def update_model(self, pk, _attempt=0, **updates):
"""Reload the model instance from the database and update the

@@ -876,12 +875,8 @@ class BaseTask(object):
show_paths = self.proot_show_paths + local_paths + \
settings.AWX_PROOT_SHOW_PATHS

# Help the user out by including the collections path inside the bubblewrap environment
if getattr(settings, 'AWX_ANSIBLE_COLLECTIONS_PATHS', []):
show_paths.extend(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)

pi_path = settings.AWX_PROOT_BASE_PATH
if not self.instance.is_isolated():
if not self.instance.is_isolated() and not self.instance.is_containerized:
pi_path = tempfile.mkdtemp(
prefix='ansible_runner_pi_',
dir=settings.AWX_PROOT_BASE_PATH

@@ -908,6 +903,31 @@ class BaseTask(object):
process_isolation_params['process_isolation_ro_paths'].append(instance.ansible_virtualenv_path)
return process_isolation_params

def build_params_resource_profiling(self, instance, private_data_dir):
resource_profiling_params = {}
if self.should_use_resource_profiling(instance):
cpu_poll_interval = settings.AWX_RESOURCE_PROFILING_CPU_POLL_INTERVAL
mem_poll_interval = settings.AWX_RESOURCE_PROFILING_MEMORY_POLL_INTERVAL
pid_poll_interval = settings.AWX_RESOURCE_PROFILING_PID_POLL_INTERVAL

results_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
if not os.path.isdir(results_dir):
os.makedirs(results_dir, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)

logger.debug('Collected the following resource profiling intervals: cpu: {} mem: {} pid: {}'
.format(cpu_poll_interval, mem_poll_interval, pid_poll_interval))

resource_profiling_params.update({'resource_profiling': True,
'resource_profiling_base_cgroup': 'ansible-runner',
'resource_profiling_cpu_poll_interval': cpu_poll_interval,
'resource_profiling_memory_poll_interval': mem_poll_interval,
'resource_profiling_pid_poll_interval': pid_poll_interval,
'resource_profiling_results_dir': results_dir})
else:
logger.debug('Resource profiling not enabled for task')

return resource_profiling_params

def _write_extra_vars_file(self, private_data_dir, vars, safe_dict={}):
env_path = os.path.join(private_data_dir, 'env')
try:

@@ -966,13 +986,14 @@ class BaseTask(object):
if self.should_use_proot(instance):
env['PROOT_TMP_DIR'] = settings.AWX_PROOT_BASE_PATH
env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

if 'ANSIBLE_COLLECTIONS_PATHS' in env:
env['ANSIBLE_COLLECTIONS_PATHS'] += os.pathsep + os.pathsep.join(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
else:
env['ANSIBLE_COLLECTIONS_PATHS'] = os.pathsep.join(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
return env

def should_use_resource_profiling(self, job):
'''
Return whether this task should use resource profiling
'''
return False

def should_use_proot(self, instance):
'''
Return whether this task should use proot.

@@ -1057,6 +1078,19 @@ class BaseTask(object):
'''
Hook for any steps to run after job/task is marked as complete.
'''
job_profiling_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
awx_profiling_dir = '/var/log/tower/playbook_profiling/'
if not os.path.exists(awx_profiling_dir):
os.mkdir(awx_profiling_dir)
if os.path.isdir(job_profiling_dir):
shutil.copytree(job_profiling_dir, os.path.join(awx_profiling_dir, str(instance.pk)))

if instance.is_containerized:
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
pm = PodManager(instance)
logger.debug(f"Deleting pod {pm.pod_name}")
pm.delete()
def event_handler(self, event_data):
#

@@ -1078,6 +1112,8 @@ class BaseTask(object):
if event_data.get(self.event_data_key, None):
if self.event_data_key != 'job_id':
event_data.pop('parent_uuid', None)
if self.parent_workflow_job_id:
event_data['workflow_job_id'] = self.parent_workflow_job_id
should_write_event = False
event_data.setdefault(self.event_data_key, self.instance.id)
self.dispatcher.dispatch(event_data)

@@ -1149,6 +1185,18 @@ class BaseTask(object):
'''
Run the job/task and capture its output.
'''
self.instance = self.model.objects.get(pk=pk)
containerized = self.instance.is_containerized
pod_manager = None
if containerized:
# Here we are trying to launch a pod before transitioning the job into a running
# state. For some scenarios (like waiting for resources to become available) we do this
# rather than marking the job as error or failed. This is not always desirable. Cases
# such as invalid authentication should surface as an error.
pod_manager = self.deploy_container_group_pod(self.instance)
if not pod_manager:
return

# self.instance because of the update_model pattern and when it's used in callback handlers
self.instance = self.update_model(pk, status='running',
start_args='') # blank field to remove encrypted passwords

@@ -1167,6 +1215,11 @@ class BaseTask(object):
private_data_dir = None
isolated_manager_instance = None

# store a reference to the parent workflow job (if any) so we can include
# it in event data JSON
if self.instance.spawned_by_workflow:
self.parent_workflow_job_id = self.instance.get_workflow_job().id

try:
isolated = self.instance.is_isolated()
self.instance.send_notification_templates("running")

@@ -1202,6 +1255,8 @@ class BaseTask(object):
self.build_extra_vars_file(self.instance, private_data_dir)
args = self.build_args(self.instance, private_data_dir, passwords)
cwd = self.build_cwd(self.instance, private_data_dir)
resource_profiling_params = self.build_params_resource_profiling(self.instance,
private_data_dir)
process_isolation_params = self.build_params_process_isolation(self.instance,
private_data_dir,
cwd)

@@ -1241,9 +1296,14 @@ class BaseTask(object):
'pexpect_timeout': getattr(settings, 'PEXPECT_TIMEOUT', 5),
'suppress_ansible_output': True,
**process_isolation_params,
**resource_profiling_params,
},
}

if containerized:
# We don't want HOME passed through to container groups.
params['envvars'].pop('HOME')

if isinstance(self.instance, AdHocCommand):
params['module'] = self.build_module_name(self.instance)
params['module_args'] = self.build_module_args(self.instance)

@@ -1262,7 +1322,7 @@ class BaseTask(object):
if not params[v]:
del params[v]

if self.instance.is_isolated() is True:
if self.instance.is_isolated() or containerized:
module_args = None
if 'module_args' in params:
# if it's adhoc, copy the module args

@@ -1273,10 +1333,12 @@ class BaseTask(object):
params.pop('inventory'),
os.path.join(private_data_dir, 'inventory')
)

ansible_runner.utils.dump_artifacts(params)
isolated_manager_instance = isolated_manager.IsolatedManager(
cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag,
check_callback=self.check_handler,
pod_manager=pod_manager
)
status, rc = isolated_manager_instance.run(self.instance,
private_data_dir,

@@ -1330,6 +1392,41 @@ class BaseTask(object):
raise AwxTaskError.TaskError(self.instance, rc)


def deploy_container_group_pod(self, task):
from awx.main.scheduler.kubernetes import PodManager # Avoid circular import
pod_manager = PodManager(self.instance)
try:
log_name = task.log_format
logger.debug(f"Launching pod for {log_name}.")
pod_manager.deploy()
except (ApiException, Exception) as exc:
if isinstance(exc, ApiException) and exc.status == 403:
try:
if 'exceeded quota' in json.loads(exc.body)['message']:
# If the k8s cluster does not have capacity, we move the
# job back into pending and wait until the next run of
# the task manager. This does not exactly play well with
# our current instance group precedence logic, since it
# will just sit here forever if kubernetes returns this
# error.
logger.warn(exc.body)
logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.")
self.update_model(task.pk, status='pending')
return
except Exception:
logger.exception(f"Unable to handle response from Kubernetes API for {log_name}.")

logger.exception(f"Error when launching pod for {log_name}")
self.update_model(task.pk, status='error', result_traceback=traceback.format_exc())
return

self.update_model(task.pk, execution_node=pod_manager.pod_name)
return pod_manager
@task()
class RunJob(BaseTask):
'''

@@ -1474,13 +1571,23 @@ class RunJob(BaseTask):
if authorize:
env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')

for env_key, folder in (
('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'),
('ANSIBLE_ROLES_PATH', 'requirements_roles')):
paths = []
path_vars = (
('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'))

config_values = read_ansible_config(job.project.get_project_path(), list(map(lambda x: x[1], path_vars)))

for env_key, config_setting, folder, default in path_vars:
paths = default.split(':')
if env_key in env:
paths.append(env[env_key])
paths.append(os.path.join(private_data_dir, folder))
for path in env[env_key].split(':'):
if path not in paths:
paths = [env[env_key]] + paths
elif config_setting in config_values:
for path in config_values[config_setting].split(':'):
if path not in paths:
paths = [config_values[config_setting]] + paths
paths = [os.path.join(private_data_dir, folder)] + paths
env[env_key] = os.pathsep.join(paths)

return env

@@ -1595,10 +1702,18 @@ class RunJob(BaseTask):
d[r'Vault password \({}\):\s*?$'.format(vault_id)] = k
return d

def should_use_resource_profiling(self, job):
'''
Return whether this task should use resource profiling
'''
return settings.AWX_RESOURCE_PROFILING_ENABLED

def should_use_proot(self, job):
'''
Return whether this task should use proot.
'''
if job.is_containerized:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)

def pre_run_hook(self, job, private_data_dir):

@@ -1659,6 +1774,7 @@ class RunJob(BaseTask):
if job.is_isolated() is True:
pu_ig = pu_ig.controller
pu_en = settings.CLUSTER_HOST_ID

sync_metafields = dict(
launch_type="sync",
job_type='run',

@@ -1696,29 +1812,11 @@ class RunJob(BaseTask):
# up-to-date with project, job is running project current version
if job_revision:
job = self.update_model(job.pk, scm_revision=job_revision)

# copy the project directory
runner_project_folder = os.path.join(private_data_dir, 'project')
if job.project.scm_type == 'git':
git_repo = git.Repo(project_path)
if not os.path.exists(runner_project_folder):
os.mkdir(runner_project_folder)
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
# always clone based on specific job revision
if not job.scm_revision:
raise RuntimeError('Unexpectedly could not determine a revision to run from project.')
source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision)
# git clone must take file:// syntax for source repo or else options like depth will be ignored
source_as_uri = Path(project_path).as_uri()
git.Repo.clone_from(
source_as_uri, runner_project_folder, branch=source_branch,
depth=1, single_branch=True, # shallow, do not copy full history
recursive=True # include submodules
# Project update does not copy the folder, so copy here
RunProjectUpdate.make_local_copy(
project_path, os.path.join(private_data_dir, 'project'),
job.project.scm_type, job_revision
)
# force option is necessary because remote refs are not counted, although no information is lost
git_repo.delete_head(tmp_branch_name, force=True)
else:
copy_tree(project_path, runner_project_folder)

if job.inventory.kind == 'smart':
# cache smart inventory memberships so that the host_filter query is not

@@ -1737,8 +1835,9 @@ class RunJob(BaseTask):
os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'),
fact_modification_times,
)
if isolated_manager_instance:
if isolated_manager_instance and not job.is_containerized:
isolated_manager_instance.cleanup()

try:
inventory = job.inventory
except Inventory.DoesNotExist:
@@ -1830,6 +1929,30 @@ class RunProjectUpdate(BaseTask):
env['TMP'] = settings.AWX_PROOT_BASE_PATH
env['PROJECT_UPDATE_ID'] = str(project_update.pk)
env['ANSIBLE_CALLBACK_PLUGINS'] = self.get_path_to('..', 'plugins', 'callback')
env['ANSIBLE_GALAXY_IGNORE'] = True
# Set up the public Galaxy server, if enabled
if settings.PUBLIC_GALAXY_ENABLED:
galaxy_servers = [settings.PUBLIC_GALAXY_SERVER]
else:
galaxy_servers = []
# Set up fallback Galaxy servers, if configured
if settings.FALLBACK_GALAXY_SERVERS:
galaxy_servers = settings.FALLBACK_GALAXY_SERVERS + galaxy_servers
# Set up the primary Galaxy server, if configured
if settings.PRIMARY_GALAXY_URL:
galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
for key in GALAXY_SERVER_FIELDS:
value = getattr(settings, 'PRIMARY_GALAXY_{}'.format(key.upper()))
if value:
galaxy_servers[0][key] = value
for server in galaxy_servers:
for key in GALAXY_SERVER_FIELDS:
if not server.get(key):
continue
env_key = ('ANSIBLE_GALAXY_SERVER_{}_{}'.format(server.get('id', 'unnamed'), key)).upper()
env[env_key] = server[key]
# now set the precedence of galaxy servers
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
return env

def _build_scm_url_extra_vars(self, project_update):

@@ -1895,8 +2018,8 @@ class RunProjectUpdate(BaseTask):
extra_vars.update(extra_vars_new)

scm_branch = project_update.scm_branch
branch_override = bool(project_update.scm_branch != project_update.project.scm_branch)
if project_update.job_type == 'run' and scm_branch and (not branch_override):
branch_override = bool(scm_branch and project_update.scm_branch != project_update.project.scm_branch)
if project_update.job_type == 'run' and (not branch_override):
scm_branch = project_update.project.scm_revision
elif not scm_branch:
scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')

@@ -2064,15 +2187,51 @@ class RunProjectUpdate(BaseTask):
git_repo = git.Repo(project_path)
self.original_branch = git_repo.active_branch

@staticmethod
def make_local_copy(project_path, destination_folder, scm_type, scm_revision):
if scm_type == 'git':
git_repo = git.Repo(project_path)
if not os.path.exists(destination_folder):
os.mkdir(destination_folder, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
# always clone based on specific job revision
if not scm_revision:
raise RuntimeError('Unexpectedly could not determine a revision to run from project.')
source_branch = git_repo.create_head(tmp_branch_name, scm_revision)
# git clone must take file:// syntax for source repo or else options like depth will be ignored
source_as_uri = Path(project_path).as_uri()
git.Repo.clone_from(
source_as_uri, destination_folder, branch=source_branch,
depth=1, single_branch=True, # shallow, do not copy full history
)
# submodules copied in loop because shallow copies from local HEADs are ideal
# and no git clone submodule options are compatible with minimum requirements
for submodule in git_repo.submodules:
subrepo_path = os.path.abspath(os.path.join(project_path, submodule.path))
subrepo_destination_folder = os.path.abspath(os.path.join(destination_folder, submodule.path))
subrepo_uri = Path(subrepo_path).as_uri()
git.Repo.clone_from(subrepo_uri, subrepo_destination_folder, depth=1, single_branch=True)
# force option is necessary because remote refs are not counted, although no information is lost
git_repo.delete_head(tmp_branch_name, force=True)
else:
copy_tree(project_path, destination_folder)

def post_run_hook(self, instance, status):
if self.original_branch:
# for git project syncs, non-default branches can be problems
# restore to branch the repo was on before this run
try:
self.original_branch.checkout()
except Exception:
# this could have failed due to dirty tree, but difficult to predict all cases
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
if self.job_private_data_dir:
# copy project folder before resetting to default branch
# because some git-tree-specific resources (like submodules) might matter
self.make_local_copy(
instance.get_project_path(check_if_exists=False), os.path.join(self.job_private_data_dir, 'project'),
instance.scm_type, self.playbook_new_revision
)
if self.original_branch:
# for git project syncs, non-default branches can be problems
# restore to branch the repo was on before this run
try:
self.original_branch.checkout()
except Exception:
# this could have failed due to dirty tree, but difficult to predict all cases
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
self.release_lock(instance)
p = instance.project
if self.playbook_new_revision:
@@ -2172,6 +2331,27 @@ class RunInventoryUpdate(BaseTask):
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
elif inventory_update.source == 'file':
raise NotImplementedError('Cannot update file sources through the task system.')

if inventory_update.source == 'scm' and inventory_update.source_project_update:
env_key = 'ANSIBLE_COLLECTIONS_PATHS'
config_setting = 'collections_paths'
folder = 'requirements_collections'
default = '~/.ansible/collections:/usr/share/ansible/collections'

config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), [config_setting])

paths = default.split(':')
if env_key in env:
for path in env[env_key].split(':'):
if path not in paths:
paths = [env[env_key]] + paths
elif config_setting in config_values:
for path in config_values[config_setting].split(':'):
if path not in paths:
paths = [config_values[config_setting]] + paths
paths = [os.path.join(private_data_dir, folder)] + paths
env[env_key] = os.pathsep.join(paths)

return env

def write_args_file(self, private_data_dir, args):

@@ -2234,7 +2414,7 @@ class RunInventoryUpdate(BaseTask):
getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()),])
# Add arguments for the source inventory script
args.append('--source')
args.append(self.psuedo_build_inventory(inventory_update, private_data_dir))
args.append(self.pseudo_build_inventory(inventory_update, private_data_dir))
if src == 'custom':
args.append("--custom")
args.append('-v%d' % inventory_update.verbosity)

@@ -2245,7 +2425,7 @@ class RunInventoryUpdate(BaseTask):
def build_inventory(self, inventory_update, private_data_dir):
return None # what runner expects in order to not deal with inventory

def psuedo_build_inventory(self, inventory_update, private_data_dir):
def pseudo_build_inventory(self, inventory_update, private_data_dir):
"""Inventory imports are ran through a management command
we pass the inventory in args to that command, so this is not considered
to be "Ansible" inventory (by runner) even though it is

@@ -2270,7 +2450,7 @@ class RunInventoryUpdate(BaseTask):
# Use the vendored script path
inventory_path = self.get_path_to('..', 'plugins', 'inventory', injector.script_name)
elif src == 'scm':
inventory_path = inventory_update.get_actual_source_path()
inventory_path = os.path.join(private_data_dir, 'project', inventory_update.source_path)
elif src == 'custom':
handle, inventory_path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')

@@ -2291,7 +2471,7 @@ class RunInventoryUpdate(BaseTask):
'''
src = inventory_update.source
if src == 'scm' and inventory_update.source_project_update:
return inventory_update.source_project_update.get_project_path(check_if_exists=False)
return os.path.join(private_data_dir, 'project')
if src in CLOUD_PROVIDERS:
injector = None
if src in InventorySource.injectors:

@@ -2327,8 +2507,10 @@ class RunInventoryUpdate(BaseTask):

project_update_task = local_project_sync._get_task_class()
try:
project_update_task().run(local_project_sync.id)
inventory_update.inventory_source.scm_last_revision = local_project_sync.project.scm_revision
sync_task = project_update_task(job_private_data_dir=private_data_dir)
sync_task.run(local_project_sync.id)
local_project_sync.refresh_from_db()
inventory_update.inventory_source.scm_last_revision = local_project_sync.scm_revision
inventory_update.inventory_source.save(update_fields=['scm_last_revision'])
except Exception:
inventory_update = self.update_model(

@@ -2336,6 +2518,13 @@ class RunInventoryUpdate(BaseTask):
job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
('project_update', local_project_sync.name, local_project_sync.id)))
raise
elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project:
# This follows update, not sync, so make copy here
project_path = source_project.get_project_path(check_if_exists=False)
RunProjectUpdate.make_local_copy(
project_path, os.path.join(private_data_dir, 'project'),
source_project.scm_type, source_project.scm_revision
)


@task()

@@ -2518,6 +2707,8 @@ class RunAdHocCommand(BaseTask):
'''
Return whether this task should use proot.
'''
if ad_hoc_command.is_containerized:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)

def final_run_hook(self, adhoc_job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
@@ -154,12 +154,12 @@ def mk_job_template(name, job_type='run',
|
||||
organization=None, inventory=None,
|
||||
credential=None, network_credential=None,
|
||||
cloud_credential=None, persisted=True, extra_vars='',
|
||||
project=None, spec=None):
|
||||
project=None, spec=None, webhook_service=''):
|
||||
if extra_vars:
|
||||
extra_vars = json.dumps(extra_vars)
|
||||
|
||||
jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars,
|
||||
playbook='helloworld.yml')
|
||||
webhook_service=webhook_service, playbook='helloworld.yml')
|
||||
|
||||
jt.inventory = inventory
|
||||
if jt.inventory is None:
|
||||
@@ -200,11 +200,13 @@ def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
|
||||
return job
|
||||
|
||||
|
||||
def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True):
|
||||
def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True,
|
||||
webhook_service=''):
|
||||
if extra_vars:
|
||||
extra_vars = json.dumps(extra_vars)
|
||||
|
||||
wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization)
|
||||
wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization,
|
||||
webhook_service=webhook_service)
|
||||
|
||||
wfjt.survey_spec = spec
|
||||
if wfjt.survey_spec:
|
||||
|
||||
@@ -197,7 +197,7 @@ def create_survey_spec(variables=None, default_type='integer', required=True, mi
#


def create_job_template(name, roles=None, persisted=True, **kwargs):
def create_job_template(name, roles=None, persisted=True, webhook_service='', **kwargs):
    Objects = generate_objects(["job_template", "jobs",
                                "organization",
                                "inventory",
@@ -252,11 +252,10 @@ def create_job_template(name, roles=None, persisted=True, **kwargs):
    else:
        spec = None

    jt = mk_job_template(name, project=proj,
                         inventory=inv, credential=cred,
    jt = mk_job_template(name, project=proj, inventory=inv, credential=cred,
                         network_credential=net_cred, cloud_credential=cloud_cred,
                         job_type=job_type, spec=spec, extra_vars=extra_vars,
                         persisted=persisted)
                         persisted=persisted, webhook_service=webhook_service)

    if 'jobs' in kwargs:
        for i in kwargs['jobs']:
@@ -401,7 +400,7 @@ def generate_workflow_job_template_nodes(workflow_job_template,


# TODO: Implement survey and jobs
def create_workflow_job_template(name, organization=None, persisted=True, **kwargs):
def create_workflow_job_template(name, organization=None, persisted=True, webhook_service='', **kwargs):
    Objects = generate_objects(["workflow_job_template",
                                "workflow_job_template_nodes",
                                "survey",], kwargs)
@@ -418,7 +417,8 @@ def create_workflow_job_template(name, organization=None, persisted=True, **kwar
        organization=organization,
        spec=spec,
        extra_vars=extra_vars,
        persisted=persisted)
        persisted=persisted,
        webhook_service=webhook_service)

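Illustrative usage of the extended factories (assumed call pattern based on the new signatures above; 'github' is one of the webhook services AWX supports):

    # Hypothetical calls; keyword names come from the factory signatures in this diff.
    objs = create_job_template('jt-with-webhook', webhook_service='github', persisted=False)
    wf_objs = create_workflow_job_template('wfjt-with-webhook', webhook_service='github', persisted=False)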
@@ -496,9 +496,6 @@ def test_falsey_field_data(get, post, organization, admin, field_value):

@pytest.mark.django_db
@pytest.mark.parametrize('kind, extraneous', [
    ['ssh', 'ssh_key_unlock'],
    ['scm', 'ssh_key_unlock'],
    ['net', 'ssh_key_unlock'],
    ['net', 'authorize_password'],
])
def test_field_dependencies(get, post, organization, admin, kind, extraneous):

awx/main/tests/functional/api/test_events.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import pytest

from awx.api.versioning import reverse
from awx.main.models import AdHocCommand, AdHocCommandEvent, JobEvent


@pytest.mark.django_db
@pytest.mark.parametrize('truncate, expected', [
    (True, False),
    (False, True),
])
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    job = jt.create_unified_job()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start',
                              stdout='a' * 1025)

    url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
    if not truncate:
        url += '?no_truncate=1'

    response = get(url, user=objs.superusers.admin, expect=200)
    assert (len(response.data['results'][0]['stdout']) == 1025) == expected


@pytest.mark.django_db
@pytest.mark.parametrize('truncate, expected', [
    (True, False),
    (False, True),
])
def test_ad_hoc_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    objs = organization_factory("org", superusers=['admin'])
    adhoc = AdHocCommand()
    adhoc.save()
    AdHocCommandEvent.create_from_data(ad_hoc_command_id=adhoc.pk, uuid='abc123', event='runner_on_start',
                                       stdout='a' * 1025)

    url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': adhoc.pk})
    if not truncate:
        url += '?no_truncate=1'

    response = get(url, user=objs.superusers.admin, expect=200)
    assert (len(response.data['results'][0]['stdout']) == 1025) == expected
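For reference (not part of the diff): the truncation switch exercised above could be driven from an external client roughly as follows; the host, job id and token are placeholders, and the URL path is assumed from the api:job_job_events_list route.

    import requests

    base = 'https://awx.example.com/api/v2'        # placeholder host
    headers = {'Authorization': 'Bearer <token>'}  # placeholder token

    # The default listing truncates long event stdout.
    truncated = requests.get(f'{base}/jobs/42/job_events/', headers=headers).json()
    # Adding no_truncate=1 returns the full stdout for each event.
    full = requests.get(f'{base}/jobs/42/job_events/?no_truncate=1', headers=headers).json()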
@@ -117,3 +117,10 @@ def test_handle_content_type(post, admin):
         admin,
         content_type='text/html',
         expect=415)


@pytest.mark.django_db
def test_basic_not_found(get, admin_user):
    root_url = reverse('api:api_v2_root_view')
    r = get(root_url + 'fooooooo', user=admin_user, expect=404)
    assert r.data.get('detail') == 'The requested resource could not be found.'

@@ -45,6 +45,14 @@ def isolated_instance_group(instance_group, instance):
    return ig


@pytest.fixture
def containerized_instance_group(instance_group, kube_credential):
    ig = InstanceGroup(name="container")
    ig.credential = kube_credential
    ig.save()
    return ig


@pytest.fixture
def create_job_factory(job_factory, instance_group):
    def fn(status='running'):
@@ -240,3 +248,29 @@ def test_instance_group_order_persistence(get, post, admin, source_model):
    resp = get(url, admin)
    assert resp.data['count'] == total
    assert [ig['name'] for ig in resp.data['results']] == [ig.name for ig in before]


@pytest.mark.django_db
def test_instance_group_update_fields(patch, instance, instance_group, admin, containerized_instance_group):
    # policy_instance_ variables can only be updated in instance groups that are NOT containerized
    # instance group (not containerized)
    ig_url = reverse("api:instance_group_detail", kwargs={'pk': instance_group.pk})
    assert not instance_group.is_containerized
    assert not containerized_instance_group.is_isolated
    resp = patch(ig_url, {'policy_instance_percentage':15}, admin, expect=200)
    assert 15 == resp.data['policy_instance_percentage']
    resp = patch(ig_url, {'policy_instance_minimum':15}, admin, expect=200)
    assert 15 == resp.data['policy_instance_minimum']
    resp = patch(ig_url, {'policy_instance_list':[instance.hostname]}, admin)
    assert [instance.hostname] == resp.data['policy_instance_list']

    # containerized instance group
    cg_url = reverse("api:instance_group_detail", kwargs={'pk': containerized_instance_group.pk})
    assert containerized_instance_group.is_containerized
    assert not containerized_instance_group.is_isolated
    resp = patch(cg_url, {'policy_instance_percentage':15}, admin, expect=400)
    assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_percentage']
    resp = patch(cg_url, {'policy_instance_minimum':15}, admin, expect=400)
    assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_minimum']
    resp = patch(cg_url, {'policy_instance_list':[instance.hostname]}, admin)
    assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_list']

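For reference (not part of the diff): the same policy-field behaviour can be seen against the REST API; the host, ids and token are placeholders, and the path is assumed from the api:instance_group_detail route.

    import requests

    base = 'https://awx.example.com/api/v2'        # placeholder host
    headers = {'Authorization': 'Bearer <token>'}  # placeholder token

    # Allowed on a regular instance group ...
    requests.patch(f'{base}/instance_groups/1/', headers=headers,
                   json={'policy_instance_percentage': 15})
    # ... but a containerized group answers 400 with
    # {"policy_instance_percentage": ["Containerized instances may not be managed via the API"]}
    requests.patch(f'{base}/instance_groups/2/', headers=headers,
                   json={'policy_instance_percentage': 15})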
@@ -127,3 +127,53 @@ def test_post_wfjt_running_notification(get, post, admin, notification_template,
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 1


@pytest.mark.django_db
def test_search_on_notification_configuration_is_prevented(get, admin):
    url = reverse('api:notification_template_list')
    response = get(url, {'notification_configuration__regex': 'ABCDEF'}, admin)
    assert response.status_code == 403
    assert response.data == {"detail": "Filtering on notification_configuration is not allowed."}


@pytest.mark.django_db
def test_get_wfjt_approval_notification(get, admin, workflow_job_template):
    url = reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': workflow_job_template.pk})
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 0


@pytest.mark.django_db
def test_post_wfjt_approval_notification(get, post, admin, notification_template, workflow_job_template):
    url = reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': workflow_job_template.pk})
    response = post(url,
                    dict(id=notification_template.id,
                         associate=True),
                    admin)
    assert response.status_code == 204
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 1


@pytest.mark.django_db
def test_get_org_approval_notification(get, admin, organization):
    url = reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': organization.pk})
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 0


@pytest.mark.django_db
def test_post_org_approval_notification(get, post, admin, notification_template, organization):
    url = reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': organization.pk})
    response = post(url,
                    dict(id=notification_template.id,
                         associate=True),
                    admin)
    assert response.status_code == 204
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 1

awx/main/tests/functional/api/test_webhooks.py (new file, 260 lines)
@@ -0,0 +1,260 @@
import pytest

from awx.api.versioning import reverse
from awx.main.models.mixins import WebhookTemplateMixin
from awx.main.models.credential import Credential, CredentialType


@pytest.mark.django_db
@pytest.mark.parametrize(
    "user_role, expect", [
        ('superuser', 200),
        ('org admin', 200),
        ('jt admin', 200),
        ('jt execute', 403),
        ('org member', 403),
    ]
)
def test_get_webhook_key_jt(organization_factory, job_template_factory, get, user_role, expect):
    objs = organization_factory("org", superusers=['admin'], users=['user'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    if user_role == 'superuser':
        user = objs.superusers.admin
    else:
        user = objs.users.user
        grant_obj = objs.organization if user_role.startswith('org') else jt
        getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)

    url = reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': jt.pk})
    response = get(url, user=user, expect=expect)
    if expect < 400:
        assert response.data == {'webhook_key': ''}


@pytest.mark.django_db
@pytest.mark.parametrize(
    "user_role, expect", [
        ('superuser', 200),
        ('org admin', 200),
        ('jt admin', 200),
        ('jt execute', 403),
        ('org member', 403),
    ]
)
def test_get_webhook_key_wfjt(organization_factory, workflow_job_template_factory, get, user_role, expect):
    objs = organization_factory("org", superusers=['admin'], users=['user'])
    wfjt = workflow_job_template_factory("wfjt", organization=objs.organization).workflow_job_template
    if user_role == 'superuser':
        user = objs.superusers.admin
    else:
        user = objs.users.user
        grant_obj = objs.organization if user_role.startswith('org') else wfjt
        getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)

    url = reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': wfjt.pk})
    response = get(url, user=user, expect=expect)
    if expect < 400:
        assert response.data == {'webhook_key': ''}


@pytest.mark.django_db
@pytest.mark.parametrize(
    "user_role, expect", [
        ('superuser', 201),
        ('org admin', 201),
        ('jt admin', 201),
        ('jt execute', 403),
        ('org member', 403),
    ]
)
def test_post_webhook_key_jt(organization_factory, job_template_factory, post, user_role, expect):
    objs = organization_factory("org", superusers=['admin'], users=['user'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    if user_role == 'superuser':
        user = objs.superusers.admin
    else:
        user = objs.users.user
        grant_obj = objs.organization if user_role.startswith('org') else jt
        getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)

    url = reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': jt.pk})
    response = post(url, {}, user=user, expect=expect)
    if expect < 400:
        assert bool(response.data.get('webhook_key'))


@pytest.mark.django_db
@pytest.mark.parametrize(
    "user_role, expect", [
        ('superuser', 201),
        ('org admin', 201),
        ('jt admin', 201),
        ('jt execute', 403),
        ('org member', 403),
    ]
)
def test_post_webhook_key_wfjt(organization_factory, workflow_job_template_factory, post, user_role, expect):
    objs = organization_factory("org", superusers=['admin'], users=['user'])
    wfjt = workflow_job_template_factory("wfjt", organization=objs.organization).workflow_job_template
    if user_role == 'superuser':
        user = objs.superusers.admin
    else:
        user = objs.users.user
        grant_obj = objs.organization if user_role.startswith('org') else wfjt
        getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)

    url = reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': wfjt.pk})
    response = post(url, {}, user=user, expect=expect)
    if expect < 400:
        assert bool(response.data.get('webhook_key'))

@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_service(organization_factory, job_template_factory, patch, service):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert (jt.webhook_service, jt.webhook_key) == ('', '')

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_service': service}, user=admin, expect=200)
    jt.refresh_from_db()

    assert jt.webhook_service == service
    assert jt.webhook_key != ''


@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_unset_webhook_service(organization_factory, job_template_factory, patch, service):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_service': ''}, user=admin, expect=200)
    jt.refresh_from_db()

    assert (jt.webhook_service, jt.webhook_key) == ('', '')


@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_credential(organization_factory, job_template_factory, patch, service):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    cred_type = CredentialType.defaults['{}_token'.format(service)]()
    cred_type.save()
    cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_credential': cred.pk}, user=admin, expect=200)
    jt.refresh_from_db()

    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential == cred

@pytest.mark.django_db
@pytest.mark.parametrize(
    "service,token", [
        (s, WebhookTemplateMixin.SERVICES[i - 1][0]) for i, (s, _) in enumerate(WebhookTemplateMixin.SERVICES)
    ]
)
def test_set_wrong_service_webhook_credential(organization_factory, job_template_factory, patch, service, token):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    cred_type = CredentialType.defaults['{}_token'.format(token)]()
    cred_type.save()
    cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
    jt.refresh_from_db()

    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential is None
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}


@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_credential_without_service(organization_factory, job_template_factory, patch, service):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == ''
    assert jt.webhook_key == ''

    cred_type = CredentialType.defaults['{}_token'.format(service)]()
    cred_type.save()
    cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
    jt.refresh_from_db()

    assert jt.webhook_service == ''
    assert jt.webhook_key == ''
    assert jt.webhook_credential is None
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}


@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_unset_webhook_service_with_credential(organization_factory, job_template_factory, patch, service):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    cred_type = CredentialType.defaults['{}_token'.format(service)]()
    cred_type.save()
    cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
                                     inputs={'token': 'secret'})
    jt.webhook_credential = cred
    jt.save()

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_service': ''}, user=admin, expect=400)
    jt.refresh_from_db()

    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential == cred
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}
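For reference (not part of the diff): the webhook key endpoint exercised above would be used from a client roughly like this; the host, template id and token are placeholders, and the URL path is assumed from the api:webhook_key route.

    import requests

    base = 'https://awx.example.com/api/v2'        # placeholder host
    headers = {'Authorization': 'Bearer <token>'}  # placeholder token

    # GET returns the current key ('' until a webhook service is configured).
    key = requests.get(f'{base}/job_templates/42/webhook_key/', headers=headers).json()['webhook_key']
    # POST rotates the key; per the tests above, only superusers, org admins
    # and template admins are allowed to do this.
    new_key = requests.post(f'{base}/job_templates/42/webhook_key/', headers=headers).json()['webhook_key']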
@@ -8,6 +8,8 @@ from unittest.mock import PropertyMock

# Django
from django.urls import resolve
from django.http import Http404
from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper
@@ -203,6 +205,13 @@ def organization(instance):
    return Organization.objects.create(name="test-org", description="test-org-desc")


@pytest.fixture
def credentialtype_kube():
    kube = CredentialType.defaults['kubernetes_bearer_token']()
    kube.save()
    return kube


@pytest.fixture
def credentialtype_ssh():
    ssh = CredentialType.defaults['ssh']()
@@ -336,6 +345,12 @@ def other_external_credential(credentialtype_external):
                                     inputs={'url': 'http://testhost.com', 'token': 'secret2'})


@pytest.fixture
def kube_credential(credentialtype_kube):
    return Credential.objects.create(credential_type=credentialtype_kube, name='kube-cred',
                                     inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False})


@pytest.fixture
def inventory(organization):
    return organization.inventories.create(name="test-inv")
@@ -568,8 +583,12 @@ def _request(verb):
        if 'format' not in kwargs and 'content_type' not in kwargs:
            kwargs['format'] = 'json'

        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
        request = getattr(APIRequestFactory(), verb)(url, **kwargs)
        request_error = None
        try:
            view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
        except Http404 as e:
            request_error = e
        if isinstance(kwargs.get('cookies', None), dict):
            for key, value in kwargs['cookies'].items():
                request.COOKIES[key] = value
@@ -578,7 +597,10 @@ def _request(verb):
        if user:
            force_authenticate(request, user=user)

        response = view(request, *view_args, **view_kwargs)
        if not request_error:
            response = view(request, *view_args, **view_kwargs)
        else:
            response = response_for_exception(request, request_error)
        if middleware:
            middleware.process_response(request, response)
        if expect:
@@ -87,7 +87,7 @@ class TestJobNotificationMixin(object):
                             'use_fact_cache': bool,
                             'verbosity': int},
                    'job_friendly_name': str,
                    'job_summary_dict': str,
                    'job_metadata': str,
                    'url': str}


@@ -144,5 +144,3 @@ class TestJobNotificationMixin(object):

        context_stub = JobNotificationMixin.context_stub()
        check_structure_and_completeness(TestJobNotificationMixin.CONTEXT_STRUCTURE, context_stub)

@@ -147,6 +147,39 @@ class TestMetaVars:
        assert data['awx_schedule_id'] == schedule.pk
        assert 'awx_user_name' not in data

    def test_scheduled_workflow_job_node_metavars(self, workflow_job_template):
        schedule = Schedule.objects.create(
            name='job-schedule',
            rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY',
            unified_job_template=workflow_job_template
        )

        workflow_job = WorkflowJob.objects.create(
            name='workflow-job',
            workflow_job_template=workflow_job_template,
            schedule=schedule
        )

        job = Job.objects.create(
            launch_type='workflow'
        )
        workflow_job.workflow_nodes.create(job=job)
        assert job.awx_meta_vars() == {
            'awx_job_id': job.id,
            'tower_job_id': job.id,
            'awx_job_launch_type': 'workflow',
            'tower_job_launch_type': 'workflow',
            'awx_workflow_job_name': 'workflow-job',
            'tower_workflow_job_name': 'workflow-job',
            'awx_workflow_job_id': workflow_job.id,
            'tower_workflow_job_id': workflow_job.id,
            'awx_parent_job_schedule_id': schedule.id,
            'tower_parent_job_schedule_id': schedule.id,
            'awx_parent_job_schedule_name': 'job-schedule',
            'tower_parent_job_schedule_name': 'job-schedule',
        }


@pytest.mark.django_db
def test_event_processing_not_finished():
@@ -1,6 +1,8 @@

# Python
import pytest
from unittest import mock
import json

# AWX
from awx.main.models.workflow import (
@@ -248,7 +250,6 @@ class TestWorkflowJobTemplate:
        test_view = WorkflowJobTemplateNodeSuccessNodesList()
        nodes = wfjt.workflow_job_template_nodes.all()
        # test cycle validation
        print(nodes[0].success_nodes.get(id=nodes[1].id).failure_nodes.get(id=nodes[2].id))
        assert test_view.is_valid_relation(nodes[2], nodes[0]) == {'Error': 'Cycle detected.'}

    def test_always_success_failure_creation(self, wfjt, admin, get):
@@ -270,6 +271,103 @@ class TestWorkflowJobTemplate:
        wfjt2.validate_unique()


@pytest.mark.django_db
class TestWorkflowJobTemplatePrompts:
    """These are tests for prompts that live on the workflow job template model
    not the node, prompts apply for entire workflow
    """
    @pytest.fixture
    def wfjt_prompts(self):
        return WorkflowJobTemplate.objects.create(
            ask_inventory_on_launch=True,
            ask_variables_on_launch=True,
            ask_limit_on_launch=True,
            ask_scm_branch_on_launch=True
        )

    @pytest.fixture
    def prompts_data(self, inventory):
        return dict(
            inventory=inventory,
            extra_vars={'foo': 'bar'},
            limit='webservers',
            scm_branch='release-3.3'
        )

    def test_apply_workflow_job_prompts(self, workflow_job_template, wfjt_prompts, prompts_data, inventory):
        # null or empty fields used
        workflow_job = workflow_job_template.create_unified_job()
        assert workflow_job.limit is None
        assert workflow_job.inventory is None
        assert workflow_job.scm_branch is None

        # fields from prompts used
        workflow_job = workflow_job_template.create_unified_job(**prompts_data)
        assert json.loads(workflow_job.extra_vars) == {'foo': 'bar'}
        assert workflow_job.limit == 'webservers'
        assert workflow_job.inventory == inventory
        assert workflow_job.scm_branch == 'release-3.3'

        # non-null fields from WFJT used
        workflow_job_template.inventory = inventory
        workflow_job_template.limit = 'fooo'
        workflow_job_template.scm_branch = 'bar'
        workflow_job = workflow_job_template.create_unified_job()
        assert workflow_job.limit == 'fooo'
        assert workflow_job.inventory == inventory
        assert workflow_job.scm_branch == 'bar'

    @pytest.mark.django_db
    def test_process_workflow_job_prompts(self, inventory, workflow_job_template, wfjt_prompts, prompts_data):
        accepted, rejected, errors = workflow_job_template._accept_or_ignore_job_kwargs(**prompts_data)
        assert accepted == {}
        assert rejected == prompts_data
        assert errors
        accepted, rejected, errors = wfjt_prompts._accept_or_ignore_job_kwargs(**prompts_data)
        assert accepted == prompts_data
        assert rejected == {}
        assert not errors

    @pytest.mark.django_db
    def test_set_all_the_prompts(self, post, organization, inventory, org_admin):
        r = post(
            url = reverse('api:workflow_job_template_list'),
            data = dict(
                name='My new workflow',
                organization=organization.id,
                inventory=inventory.id,
                limit='foooo',
                ask_limit_on_launch=True,
                scm_branch='bar',
                ask_scm_branch_on_launch=True
            ),
            user = org_admin,
            expect = 201
        )
        wfjt = WorkflowJobTemplate.objects.get(id=r.data['id'])
        assert wfjt.char_prompts == {
            'limit': 'foooo', 'scm_branch': 'bar'
        }
        assert wfjt.ask_scm_branch_on_launch is True
        assert wfjt.ask_limit_on_launch is True

        launch_url = r.data['related']['launch']
        with mock.patch('awx.main.queue.CallbackQueueDispatcher.dispatch', lambda self, obj: None):
            r = post(
                url = launch_url,
                data = dict(
                    scm_branch = 'prompt_branch',
                    limit = 'prompt_limit'
                ),
                user = org_admin,
                expect=201
            )
        assert r.data['limit'] == 'prompt_limit'
        assert r.data['scm_branch'] == 'prompt_branch'

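For reference (not part of the diff): the prompt behaviour covered by test_set_all_the_prompts maps onto the REST API roughly as follows; the host, ids and token are placeholders.

    import requests

    base = 'https://awx.example.com/api/v2'        # placeholder host
    headers = {'Authorization': 'Bearer <token>'}  # placeholder token

    wfjt = requests.post(f'{base}/workflow_job_templates/', headers=headers, json={
        'name': 'My new workflow',
        'organization': 1,
        'inventory': 1,
        'limit': 'foooo',
        'ask_limit_on_launch': True,
        'scm_branch': 'bar',
        'ask_scm_branch_on_launch': True,
    }).json()

    # Prompted values override the template's own limit/scm_branch for this launch.
    launch = requests.post(f'{base}/workflow_job_templates/{wfjt["id"]}/launch/',
                           headers=headers,
                           json={'scm_branch': 'prompt_branch', 'limit': 'prompt_limit'}).json()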
@pytest.mark.django_db
def test_workflow_ancestors(organization):
    # Spawn order of templates grandparent -> parent -> child

Some files were not shown because too many files have changed in this diff.