mirror of
https://github.com/ansible/awx.git
synced 2026-02-05 03:24:50 -03:30
Compare commits
759 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3277d3afe0 | ||
|
|
45136b6503 | ||
|
|
e9af6af97c | ||
|
|
f86d647571 | ||
|
|
f02357ca16 | ||
|
|
e64b087e9f | ||
|
|
5bb7e69a4d | ||
|
|
a8aed53c10 | ||
|
|
b19539069c | ||
|
|
312cf13777 | ||
|
|
c6033399d0 | ||
|
|
85f118c17d | ||
|
|
0de805ac67 | ||
|
|
c7426fbff4 | ||
|
|
3cbd52a56e | ||
|
|
97a635ef49 | ||
|
|
155ed75f15 | ||
|
|
a664c5eabe | ||
|
|
8b23c6e19a | ||
|
|
a5d9bbb1e6 | ||
|
|
c262df0dfe | ||
|
|
3f113129a9 | ||
|
|
df7e034b96 | ||
|
|
bd8b3a4f74 | ||
|
|
d01088d33e | ||
|
|
0012602b30 | ||
|
|
8ecc1f37f0 | ||
|
|
0ab44e70f9 | ||
|
|
95c9e8e068 | ||
|
|
c49e64e62c | ||
|
|
00c9d756e8 | ||
|
|
16812542f8 | ||
|
|
0bcd1db239 | ||
|
|
9edbcdc7b0 | ||
|
|
9ab58e9757 | ||
|
|
1fae3534a1 | ||
|
|
a038f9fd78 | ||
|
|
ff1e1b2010 | ||
|
|
d6134fb194 | ||
|
|
570ffad52b | ||
|
|
1cf02e1e17 | ||
|
|
2f350cfda7 | ||
|
|
8e2622d117 | ||
|
|
7dd241fcff | ||
|
|
c6a28756f2 | ||
|
|
94eb1aacb8 | ||
|
|
ffb1707e74 | ||
|
|
5e797a5ad5 | ||
|
|
4c92e0af77 | ||
|
|
24b9a6a38d | ||
|
|
857683e548 | ||
|
|
4bf96362cc | ||
|
|
5001d3158d | ||
|
|
ce5bb9197e | ||
|
|
309e89e0f0 | ||
|
|
e27dbfcc0b | ||
|
|
7df448a348 | ||
|
|
e220e9d8d7 | ||
|
|
c8a29bac66 | ||
|
|
11d39bd8cc | ||
|
|
1376b8a149 | ||
|
|
750208b2da | ||
|
|
9d81b00772 | ||
|
|
c7be94c2f2 | ||
|
|
1adf5ee51d | ||
|
|
da998fb196 | ||
|
|
b559860c78 | ||
|
|
a31e2bdac1 | ||
|
|
62e4ebb85d | ||
|
|
aa4f5ccca9 | ||
|
|
fdddba18be | ||
|
|
ad89c5eea7 | ||
|
|
e15bb4de44 | ||
|
|
5f2e1c9705 | ||
|
|
8c5b0cbd57 | ||
|
|
73272e338b | ||
|
|
86ef81cebf | ||
|
|
cd18ec408c | ||
|
|
90c5efa336 | ||
|
|
4134d0b516 | ||
|
|
2123092bdc | ||
|
|
ba7b53b38e | ||
|
|
cac5417916 | ||
|
|
b318f24490 | ||
|
|
c2743d8678 | ||
|
|
60ca843b71 | ||
|
|
766f863655 | ||
|
|
e85fe6a3b7 | ||
|
|
0b190c2d0d | ||
|
|
c7d73c4583 | ||
|
|
7ad2c03480 | ||
|
|
9e44fea7b5 | ||
|
|
baf5bbc53a | ||
|
|
20c24eb275 | ||
|
|
5cf84ddb60 | ||
|
|
85781d0bc1 | ||
|
|
cbed525547 | ||
|
|
31c14b005c | ||
|
|
7574541037 | ||
|
|
e5184e0ed1 | ||
|
|
6282b5bacb | ||
|
|
038fd9271d | ||
|
|
8e26e4edd5 | ||
|
|
027e79b7f5 | ||
|
|
cf89108edf | ||
|
|
e06bf9f87e | ||
|
|
e87055095c | ||
|
|
e672e68a02 | ||
|
|
263c44a09b | ||
|
|
08839e1381 | ||
|
|
faffbc3e65 | ||
|
|
8e296bbf8c | ||
|
|
03d59e1616 | ||
|
|
9efa7b84df | ||
|
|
8e1f7695b1 | ||
|
|
d6adab576f | ||
|
|
a803cedd7c | ||
|
|
d5bdf554f1 | ||
|
|
8f75382b81 | ||
|
|
b93164e1ed | ||
|
|
31bdde00c9 | ||
|
|
ed52e8348b | ||
|
|
008fe42b4d | ||
|
|
d9dbbe6748 | ||
|
|
16ebfe3a63 | ||
|
|
08df2cad68 | ||
|
|
ca039f5338 | ||
|
|
4b83bda306 | ||
|
|
7fc4e8d20a | ||
|
|
4c697ae477 | ||
|
|
844b8a803f | ||
|
|
7fe32ab607 | ||
|
|
cc27c95187 | ||
|
|
9745bfbdb4 | ||
|
|
d6708b2b59 | ||
|
|
c202574ae3 | ||
|
|
7efacb69aa | ||
|
|
a076e84a33 | ||
|
|
bc6648b518 | ||
|
|
ff67d65065 | ||
|
|
aab8495998 | ||
|
|
0f1a92bd51 | ||
|
|
f10dc16014 | ||
|
|
bbc4ec48b9 | ||
|
|
fff8664219 | ||
|
|
bc8f5ad015 | ||
|
|
9fade47bbf | ||
|
|
5ad922a861 | ||
|
|
f34bd632d8 | ||
|
|
d239d55d2a | ||
|
|
d9ad906167 | ||
|
|
d40ab38745 | ||
|
|
8acd4376d9 | ||
|
|
a7a194296c | ||
|
|
166635ac79 | ||
|
|
f10296b1b7 | ||
|
|
e0e9c8321b | ||
|
|
52b145dbf6 | ||
|
|
0594bdf650 | ||
|
|
ba9758ccc7 | ||
|
|
02b13fd4ae | ||
|
|
06c62c4861 | ||
|
|
132555485c | ||
|
|
f1a9e68985 | ||
|
|
c09039e963 | ||
|
|
85c99cc38a | ||
|
|
576ff1007e | ||
|
|
922e779a86 | ||
|
|
8bda048e6d | ||
|
|
093bf6877b | ||
|
|
d59d8562db | ||
|
|
c566c332f9 | ||
|
|
cb4a3a799e | ||
|
|
5a5b46aea0 | ||
|
|
85909c4264 | ||
|
|
9d2c877143 | ||
|
|
8e94c0686a | ||
|
|
52447f59c1 | ||
|
|
04eed02428 | ||
|
|
b45b9333e1 | ||
|
|
62659aefc2 | ||
|
|
a52ccd1086 | ||
|
|
bd9a196ef9 | ||
|
|
64b04e6347 | ||
|
|
15e70d2173 | ||
|
|
b981f3eed6 | ||
|
|
2c1c2f452d | ||
|
|
cf1c9a0559 | ||
|
|
ed3f49a69d | ||
|
|
74b398f920 | ||
|
|
ae0c9ead40 | ||
|
|
d6a0f929a8 | ||
|
|
f5358f748e | ||
|
|
c9e889ca82 | ||
|
|
f502fbfad6 | ||
|
|
b8fe3f648e | ||
|
|
8d3ecf708b | ||
|
|
9289ade1ec | ||
|
|
958c8a4177 | ||
|
|
59413e0a8f | ||
|
|
ad1e7c46c3 | ||
|
|
8fabb1f10d | ||
|
|
895c71f62c | ||
|
|
32a57e9a97 | ||
|
|
584777e21e | ||
|
|
61a756c59d | ||
|
|
b547a8c3ca | ||
|
|
007f33c186 | ||
|
|
aab1cd68b0 | ||
|
|
92cc9a9213 | ||
|
|
b9c675e3a2 | ||
|
|
bd5003ca98 | ||
|
|
d3b0edf75a | ||
|
|
9421781cc7 | ||
|
|
a9059edc65 | ||
|
|
7850e3a835 | ||
|
|
34d02011db | ||
|
|
353692a0ba | ||
|
|
90451e551d | ||
|
|
2457926f0a | ||
|
|
cf27ac295a | ||
|
|
7e40673dd0 | ||
|
|
daa6f35d02 | ||
|
|
cdcf2fa4c2 | ||
|
|
275765b8fc | ||
|
|
2786395808 | ||
|
|
731982c736 | ||
|
|
d2214acd6d | ||
|
|
9d593f0715 | ||
|
|
9e778b24c7 | ||
|
|
bdd28bcb3b | ||
|
|
19a6c70858 | ||
|
|
393474f33d | ||
|
|
b41e6394c5 | ||
|
|
f36f10a702 | ||
|
|
fccd6a2286 | ||
|
|
ea2312259f | ||
|
|
0e2b7767f5 | ||
|
|
82505cd43a | ||
|
|
5b17ce5729 | ||
|
|
82b313c767 | ||
|
|
fec67a3545 | ||
|
|
202af079eb | ||
|
|
f78d7637a4 | ||
|
|
71bd257191 | ||
|
|
82064eb4dc | ||
|
|
bbd625f3aa | ||
|
|
3725ccb43b | ||
|
|
8b22c86b10 | ||
|
|
e6a5d18ebe | ||
|
|
eca191f7f5 | ||
|
|
35fe127891 | ||
|
|
db1ad2de95 | ||
|
|
ac12a9cfe1 | ||
|
|
dacda644ac | ||
|
|
cfa407e001 | ||
|
|
329630ce2a | ||
|
|
1122d28a1b | ||
|
|
a9b299cd98 | ||
|
|
6c1488ed00 | ||
|
|
1f62d223a2 | ||
|
|
da23c4e949 | ||
|
|
6d00d43273 | ||
|
|
77b68e0eb7 | ||
|
|
945d100302 | ||
|
|
c0fd70f189 | ||
|
|
ba4e79fd3a | ||
|
|
db0bd471c3 | ||
|
|
616fe285fa | ||
|
|
b4b2cf76f6 | ||
|
|
4aeda635ff | ||
|
|
7e8c00ee24 | ||
|
|
27c4e35ee4 | ||
|
|
80a17987ff | ||
|
|
10a6a29a07 | ||
|
|
b80eafe4a1 | ||
|
|
6c443a0a6a | ||
|
|
55378c635e | ||
|
|
a4047e414f | ||
|
|
d549877ebd | ||
|
|
28a119ca96 | ||
|
|
758529d7dd | ||
|
|
075d1a2521 | ||
|
|
69924c9544 | ||
|
|
b858001c8f | ||
|
|
82be87566f | ||
|
|
52b8b7676a | ||
|
|
204c05aa3b | ||
|
|
ac34b24868 | ||
|
|
ffe89820e3 | ||
|
|
062c4908c9 | ||
|
|
b6b70e55fb | ||
|
|
6aa6471b7c | ||
|
|
e14d4ddec6 | ||
|
|
84dcda0a61 | ||
|
|
df24f5d28f | ||
|
|
fea7f914d2 | ||
|
|
d4c8167b1b | ||
|
|
a4873d97d8 | ||
|
|
efe4ea6575 | ||
|
|
b415c31b4f | ||
|
|
e91462d085 | ||
|
|
e85ff83be6 | ||
|
|
d500c1bb40 | ||
|
|
885841caea | ||
|
|
f7396cf81a | ||
|
|
286da3a7eb | ||
|
|
40b03eb6ef | ||
|
|
c76c531b7a | ||
|
|
75d3359b6f | ||
|
|
4ad5054222 | ||
|
|
aa34984d7c | ||
|
|
08594682a4 | ||
|
|
d73abda5d1 | ||
|
|
3bc91f123e | ||
|
|
41ba5c0968 | ||
|
|
e8e3a601b2 | ||
|
|
b96c03e456 | ||
|
|
5e9448a854 | ||
|
|
6b17e86f30 | ||
|
|
00337990db | ||
|
|
1a33ae61a7 | ||
|
|
5f7bfaa20a | ||
|
|
178a2c7c49 | ||
|
|
58e5f02129 | ||
|
|
dd6c97ed87 | ||
|
|
7aa424b210 | ||
|
|
e0a363beb8 | ||
|
|
48eb502161 | ||
|
|
151de89c26 | ||
|
|
f5c151d5c4 | ||
|
|
17b34b1e36 | ||
|
|
ee1d118752 | ||
|
|
245931f603 | ||
|
|
095aa77857 | ||
|
|
bb1397a3d4 | ||
|
|
5848f0360a | ||
|
|
83fc2187cc | ||
|
|
4dba9916dc | ||
|
|
8836ed44ce | ||
|
|
992c414737 | ||
|
|
66a8186995 | ||
|
|
fa15696ffe | ||
|
|
82a0dc0024 | ||
|
|
d4b20b7340 | ||
|
|
c0ad5a7768 | ||
|
|
d9ac291115 | ||
|
|
6b86cf6e86 | ||
|
|
771ef275d4 | ||
|
|
2310413dc0 | ||
|
|
edb9d6b16c | ||
|
|
7973a18103 | ||
|
|
747a2283d6 | ||
|
|
9d269d59d6 | ||
|
|
b0c530402f | ||
|
|
50a54c9214 | ||
|
|
8f97dbf781 | ||
|
|
a7a99ed141 | ||
|
|
d6116490c6 | ||
|
|
ff8e896b0f | ||
|
|
fc70d8b321 | ||
|
|
a61306580a | ||
|
|
afe38b8e68 | ||
|
|
505dcf9dd2 | ||
|
|
1d2123a4f9 | ||
|
|
4adf9bab67 | ||
|
|
adac87adf2 | ||
|
|
7dd8e35e8c | ||
|
|
554a63d8fc | ||
|
|
da149d931c | ||
|
|
3182197287 | ||
|
|
9ed4e1682d | ||
|
|
5aa6a94710 | ||
|
|
96689f45c8 | ||
|
|
ce6a276e1f | ||
|
|
8eb1484129 | ||
|
|
1ddf9fd1ed | ||
|
|
17a8e08d93 | ||
|
|
f835c8650b | ||
|
|
aa5a4d42c7 | ||
|
|
57fd6b7280 | ||
|
|
7eb7aad491 | ||
|
|
e2b8adcd09 | ||
|
|
13450fdbf9 | ||
|
|
6be2d84adb | ||
|
|
d2a5af44de | ||
|
|
75bb7aae14 | ||
|
|
98619c5e23 | ||
|
|
35afa37417 | ||
|
|
2f0f692f4a | ||
|
|
5271c993ac | ||
|
|
38112bae22 | ||
|
|
30a6efdb93 | ||
|
|
bffc1bfdd4 | ||
|
|
a7bf31d423 | ||
|
|
1ae1011ccb | ||
|
|
83183cd7ce | ||
|
|
934d7d62ef | ||
|
|
5dce6258e6 | ||
|
|
87f6065a05 | ||
|
|
4ca0d8c72a | ||
|
|
ba8bd25da2 | ||
|
|
df0bd0797c | ||
|
|
3aa7ee8d17 | ||
|
|
aeaab41120 | ||
|
|
1eb61ba5ce | ||
|
|
91d0c47120 | ||
|
|
b96b69360f | ||
|
|
b034295c99 | ||
|
|
c6e47a0a16 | ||
|
|
ca782a495d | ||
|
|
71c625bd83 | ||
|
|
5c2bc09f9d | ||
|
|
58c4ae6a00 | ||
|
|
37509af868 | ||
|
|
41f649b5a2 | ||
|
|
5ef995cd7d | ||
|
|
a32981242b | ||
|
|
c821996051 | ||
|
|
a37a18c0bf | ||
|
|
b11374157d | ||
|
|
2d6743635e | ||
|
|
590341ed7d | ||
|
|
ae980b9a82 | ||
|
|
94b557d8aa | ||
|
|
5dc9ca222f | ||
|
|
79166209ee | ||
|
|
45a8992254 | ||
|
|
def043c383 | ||
|
|
7b6dc13078 | ||
|
|
ee2709a898 | ||
|
|
d9d56b4b50 | ||
|
|
7580f9c2b9 | ||
|
|
d4f4983a89 | ||
|
|
24cedcc560 | ||
|
|
64014faf02 | ||
|
|
9b228d7d2d | ||
|
|
1349449e1e | ||
|
|
185e9a09e0 | ||
|
|
623e0f7cc9 | ||
|
|
8569bf71af | ||
|
|
6471481d02 | ||
|
|
139703aafb | ||
|
|
65aeb2b68a | ||
|
|
0b26297177 | ||
|
|
3e8de196ac | ||
|
|
b14323c985 | ||
|
|
2e04969f17 | ||
|
|
2edca4f357 | ||
|
|
d192297987 | ||
|
|
0bb7e06761 | ||
|
|
4213ca1424 | ||
|
|
3db3430129 | ||
|
|
f7592f6ae7 | ||
|
|
ac82751dfa | ||
|
|
f46db65bad | ||
|
|
d0ffc3f626 | ||
|
|
e9ec80d86b | ||
|
|
56723c3203 | ||
|
|
fdbafe42ab | ||
|
|
7c0554bf7b | ||
|
|
49df11d478 | ||
|
|
5ef7003395 | ||
|
|
640e528fdc | ||
|
|
06e09550af | ||
|
|
74c7c7b532 | ||
|
|
62bc1a8662 | ||
|
|
9469bbc06f | ||
|
|
7cdde96c3c | ||
|
|
bc2a63c415 | ||
|
|
9c6c9c3708 | ||
|
|
6a2e3d2915 | ||
|
|
85977be23c | ||
|
|
3855393cd3 | ||
|
|
bd6e5c2529 | ||
|
|
8a5914affd | ||
|
|
979adfd16c | ||
|
|
5f2381e9ad | ||
|
|
99027e4b30 | ||
|
|
150c0a9fdc | ||
|
|
2c825b792f | ||
|
|
2f9b0733bb | ||
|
|
962668389a | ||
|
|
0699e44b53 | ||
|
|
788a2e5fc8 | ||
|
|
16e6b3f148 | ||
|
|
6140308675 | ||
|
|
518a25430d | ||
|
|
b6ffde75ef | ||
|
|
8d44ab55f1 | ||
|
|
6e9804b713 | ||
|
|
94fa745859 | ||
|
|
eacc7b8eb0 | ||
|
|
73c3b8849b | ||
|
|
bb474b0797 | ||
|
|
c94ebba0b3 | ||
|
|
af90a78df5 | ||
|
|
de68de7f9a | ||
|
|
04ba7aaf89 | ||
|
|
454f76c066 | ||
|
|
9b281bbc8a | ||
|
|
870b76dc59 | ||
|
|
f2676064fd | ||
|
|
4b62f4845a | ||
|
|
bc6edf7af3 | ||
|
|
3dd69a06e7 | ||
|
|
cb8c9567b0 | ||
|
|
d30d51d72c | ||
|
|
693e588a25 | ||
|
|
0f42782feb | ||
|
|
90cac2ec35 | ||
|
|
8343552dfc | ||
|
|
c229e586da | ||
|
|
778b306208 | ||
|
|
415592219c | ||
|
|
3a7756393e | ||
|
|
1a7148dc80 | ||
|
|
d42ffd7353 | ||
|
|
9f8d975a19 | ||
|
|
955bb4a44c | ||
|
|
71511b66ac | ||
|
|
e97fc54deb | ||
|
|
ee27313b42 | ||
|
|
439727f1bd | ||
|
|
76325eefd3 | ||
|
|
4b8a06801c | ||
|
|
38b506bb94 | ||
|
|
61f6e3c4d2 | ||
|
|
640e5391f3 | ||
|
|
1316ace475 | ||
|
|
3282caf629 | ||
|
|
b3b53a8ce4 | ||
|
|
8dd4379bf2 | ||
|
|
b79c686336 | ||
|
|
d08c601690 | ||
|
|
86f8d648cc | ||
|
|
34bdb6d1c3 | ||
|
|
9548c8ae19 | ||
|
|
e147869d75 | ||
|
|
362339d89c | ||
|
|
ebbcefd7df | ||
|
|
6a9940c027 | ||
|
|
8c755dd316 | ||
|
|
fcfd59ebe2 | ||
|
|
ce1d9793ce | ||
|
|
f35ad41e17 | ||
|
|
d52aa11422 | ||
|
|
c628a54c79 | ||
|
|
5d840af223 | ||
|
|
1f149bb086 | ||
|
|
508535be66 | ||
|
|
cb69cac62d | ||
|
|
3ea4a32940 | ||
|
|
d6f93737c4 | ||
|
|
eb0c4fd4d4 | ||
|
|
36571a1275 | ||
|
|
b2b475d1a6 | ||
|
|
01177f3632 | ||
|
|
ac530e1328 | ||
|
|
9605d8049d | ||
|
|
d2e335c7c5 | ||
|
|
678fba1ffb | ||
|
|
34e1b8be1d | ||
|
|
86029934ad | ||
|
|
ec3edb07e8 | ||
|
|
fcc61a5752 | ||
|
|
860715d088 | ||
|
|
e2be392f31 | ||
|
|
f49d566f17 | ||
|
|
83e413b0bf | ||
|
|
e68349b6b5 | ||
|
|
14cc203945 | ||
|
|
ca5de6378a | ||
|
|
1ebe91cbf7 | ||
|
|
154cda7501 | ||
|
|
badba581fd | ||
|
|
a6a50f0eb1 | ||
|
|
b00fc29cdc | ||
|
|
13f628f73d | ||
|
|
e1bdbeaa5c | ||
|
|
b3c264bf21 | ||
|
|
53992d41d5 | ||
|
|
686d4fe26f | ||
|
|
774a4e32cc | ||
|
|
22441d280e | ||
|
|
fe850c247f | ||
|
|
c651029cdb | ||
|
|
54d50d71ab | ||
|
|
ef4f1df9bb | ||
|
|
b5fa1606bd | ||
|
|
3139bc9248 | ||
|
|
a6c0793695 | ||
|
|
5a24e223b7 | ||
|
|
e3c1189f56 | ||
|
|
fdf9dd733b | ||
|
|
9697e1befb | ||
|
|
84a8559ea0 | ||
|
|
8ac8fb9016 | ||
|
|
01bb32ebb0 | ||
|
|
711c240baf | ||
|
|
291528d823 | ||
|
|
1406ea3026 | ||
|
|
e8581f6892 | ||
|
|
fbf182de28 | ||
|
|
5fab9e418b | ||
|
|
d39b931377 | ||
|
|
9028afab07 | ||
|
|
d3b413c125 | ||
|
|
ab1db04164 | ||
|
|
3b89e894db | ||
|
|
bdbbb2a4a2 | ||
|
|
9e8d0758c8 | ||
|
|
9f0657e19a | ||
|
|
6bc09028ca | ||
|
|
e1c7cd7e9f | ||
|
|
806648af89 | ||
|
|
3e3940efd5 | ||
|
|
662033db44 | ||
|
|
eba69142f1 | ||
|
|
8a29276a7d | ||
|
|
611f163289 | ||
|
|
113622c05e | ||
|
|
608567795d | ||
|
|
29fd399b06 | ||
|
|
04f458c007 | ||
|
|
38a7d62745 | ||
|
|
53925f5e98 | ||
|
|
2474a3a2ea | ||
|
|
900fcbf87e | ||
|
|
846e67ee6a | ||
|
|
98daee4823 | ||
|
|
5ed97e0f65 | ||
|
|
e854b179e4 | ||
|
|
26e320582a | ||
|
|
ee864b2df3 | ||
|
|
e309ad25e4 | ||
|
|
55244834a3 | ||
|
|
4e1fbb3e91 | ||
|
|
fe43bab174 | ||
|
|
ed692018cd | ||
|
|
311860e027 | ||
|
|
b5225bd80d | ||
|
|
60fc952716 | ||
|
|
9d93cf8021 | ||
|
|
fe0db4e329 | ||
|
|
3c0a0e1f4a | ||
|
|
70057bc0f2 | ||
|
|
b2e8e3cc3d | ||
|
|
804ec0dde9 | ||
|
|
3e6131c509 | ||
|
|
e7bb5ac3e4 | ||
|
|
f2ccce3478 | ||
|
|
036567817e | ||
|
|
ec1e93cc69 | ||
|
|
04ab736f09 | ||
|
|
ffc6e2218e | ||
|
|
253e0765bd | ||
|
|
2d78534223 | ||
|
|
34645523fd | ||
|
|
90c7514303 | ||
|
|
3c22f99234 | ||
|
|
d8fbf1e21a | ||
|
|
168e03ea0e | ||
|
|
3eecda4edc | ||
|
|
c0d9600b66 | ||
|
|
087b68aa65 | ||
|
|
83ee4fb289 | ||
|
|
51a724451c | ||
|
|
6309c0a426 | ||
|
|
14ef06854d | ||
|
|
ad2e58cd43 | ||
|
|
2ea280bbaf | ||
|
|
788c5d3741 | ||
|
|
0e5abb5fa3 | ||
|
|
5dbcafc392 | ||
|
|
e53c979344 | ||
|
|
2527a78874 | ||
|
|
871f2cf9c5 | ||
|
|
f3dc4abe37 | ||
|
|
7646185e2c | ||
|
|
a9a1c6eb6d | ||
|
|
4b1706401f | ||
|
|
8192b79b1f | ||
|
|
8868fa6416 | ||
|
|
71215d3d03 | ||
|
|
5193209ca7 | ||
|
|
d73dd02e5b | ||
|
|
e23c1477da | ||
|
|
19d6941034 | ||
|
|
d4b2cacb3e | ||
|
|
2f7476c804 | ||
|
|
39ee60a913 | ||
|
|
da43b9b84c | ||
|
|
eaf3a28d57 | ||
|
|
14f8ef4f44 | ||
|
|
ee47e98c50 | ||
|
|
edb7ddb9ae | ||
|
|
f69f43e3ba | ||
|
|
d9a7859a05 | ||
|
|
ff32a5286e | ||
|
|
629e2e89b9 | ||
|
|
ea015190de | ||
|
|
6762702868 | ||
|
|
3e97608914 | ||
|
|
a95394b135 | ||
|
|
276b577103 | ||
|
|
b1666f2692 | ||
|
|
1f8f4e184b | ||
|
|
844f3fde72 | ||
|
|
a3a5db1c44 | ||
|
|
28630cb7fa | ||
|
|
820605b0ca | ||
|
|
bc79093102 | ||
|
|
eb2fc80114 | ||
|
|
eb2de51f86 | ||
|
|
7aa94a9bb5 | ||
|
|
be6f5e18ae | ||
|
|
9777b79818 | ||
|
|
25aa9bc43e | ||
|
|
a79de2b4ed | ||
|
|
7480baf256 | ||
|
|
5babab7af4 | ||
|
|
4e73f4b778 | ||
|
|
4f05955724 | ||
|
|
ebc369bfef | ||
|
|
10d53637ad | ||
|
|
23a7151278 | ||
|
|
0254cf3567 | ||
|
|
f91a02a9e4 | ||
|
|
93b794eaa7 | ||
|
|
927c99a2f4 | ||
|
|
b4759de30d | ||
|
|
11a71f5ffa | ||
|
|
064f871fff | ||
|
|
ffbce2611a | ||
|
|
8b9ddb5922 | ||
|
|
c65b77a20a | ||
|
|
b7e8044d69 | ||
|
|
8b1ca12d8f | ||
|
|
4f546be87a | ||
|
|
e6475f21f6 | ||
|
|
218348412b | ||
|
|
db64717551 | ||
|
|
9edc686ab5 | ||
|
|
c819a78a4b | ||
|
|
11146a071f | ||
|
|
4d31d83e1e | ||
|
|
3a9a884bbc | ||
|
|
8a31be6ffe | ||
|
|
6fd86fed65 | ||
|
|
9d48ba4243 | ||
|
|
80bdb1a67a | ||
|
|
24c16b1c58 | ||
|
|
0fb7c859f4 | ||
|
|
1e5bcca0b9 | ||
|
|
daba25f107 | ||
|
|
9d2441789e | ||
|
|
b4f6b380fd | ||
|
|
444f024bb0 |
8
.gitignore
vendored
8
.gitignore
vendored
@@ -133,4 +133,12 @@ awx/lib/site-packages
|
||||
venv/*
|
||||
use_dev_supervisor.txt
|
||||
|
||||
|
||||
# Ansible module tests
|
||||
awx_collection_test_venv/
|
||||
awx_collection/*.tar.gz
|
||||
awx_collection/galaxy.yml
|
||||
|
||||
.idea/*
|
||||
*.unison.tmp
|
||||
*.#
|
||||
|
||||
@@ -156,8 +156,8 @@ If you start a second terminal session, you can take a look at the running conta
|
||||
|
||||
$ docker ps
|
||||
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
|
||||
aa4a75d6d77b gcr.io/ansible-tower-engineering/awx_devel:devel "/tini -- /bin/sh ..." 23 seconds ago Up 15 seconds 0.0.0.0:5555->5555/tcp, 0.0.0.0:6899-6999->6899-6999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 22/tcp, 0.0.0.0:8080->8080/tcp tools_awx_1
|
||||
e4c0afeb548c postgres:9.6 "docker-entrypoint..." 26 seconds ago Up 23 seconds 5432/tcp tools_postgres_1
|
||||
aa4a75d6d77b gcr.io/ansible-tower-engineering/awx_devel:devel "/tini -- /bin/sh ..." 23 seconds ago Up 15 seconds 0.0.0.0:5555->5555/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 22/tcp, 0.0.0.0:8080->8080/tcp tools_awx_1
|
||||
e4c0afeb548c postgres:10 "docker-entrypoint..." 26 seconds ago Up 23 seconds 5432/tcp tools_postgres_1
|
||||
0089699d5afd tools_logstash "/docker-entrypoin..." 26 seconds ago Up 25 seconds tools_logstash_1
|
||||
4d4ff0ced266 memcached:alpine "docker-entrypoint..." 26 seconds ago Up 25 seconds 0.0.0.0:11211->11211/tcp tools_memcached_1
|
||||
92842acd64cd rabbitmq:3-management "docker-entrypoint..." 26 seconds ago Up 24 seconds 4369/tcp, 5671-5672/tcp, 15671/tcp, 25672/tcp, 0.0.0.0:15672->15672/tcp tools_rabbitmq_1
|
||||
|
||||
@@ -193,7 +193,7 @@ $ eval $(minishift docker-env)
|
||||
|
||||
By default, AWX will deploy a PostgreSQL pod inside of your cluster. You will need to create a [Persistent Volume Claim](https://docs.openshift.org/latest/dev_guide/persistent_volumes.html) which is named `postgresql` by default, and can be overridden by setting the `openshift_pg_pvc_name` variable. For testing and demo purposes, you may set `openshift_pg_emptydir=yes`.
|
||||
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information. When setting `pg_hostname` the installer will assume you have configured the database in that location and will not launch the postgresql pod.
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information. When setting `pg_hostname` the installer will assume you have configured the database in that location and will not launch the postgresql pod.
|
||||
|
||||
### Start the build
|
||||
|
||||
@@ -503,7 +503,7 @@ If you wish to tag and push built images to a Docker registry, set the following
|
||||
|
||||
AWX requires access to a PostgreSQL database, and by default, one will be created and deployed in a container, and data will be persisted to a host volume. In this scenario, you must set the value of `postgres_data_dir` to a path that can be mounted to the container. When the container is stopped, the database files will still exist in the specified path.
|
||||
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information.
|
||||
If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information.
|
||||
|
||||
### Start the build
|
||||
|
||||
|
||||
50
Makefile
50
Makefile
@@ -18,6 +18,7 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
|
||||
COMPOSE_HOST ?= $(shell hostname)
|
||||
|
||||
VENV_BASE ?= /venv
|
||||
COLLECTION_VENV ?= /awx_devel/awx_collection_test_venv
|
||||
SCL_PREFIX ?=
|
||||
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db
|
||||
|
||||
@@ -99,20 +100,22 @@ clean-languages:
|
||||
find . -type f -regex ".*\.mo$$" -delete
|
||||
|
||||
# Remove temporary build files, compiled Python files.
|
||||
clean: clean-ui clean-dist
|
||||
clean: clean-ui clean-api clean-dist
|
||||
rm -rf awx/public
|
||||
rm -rf awx/lib/site-packages
|
||||
rm -rf awx/job_status
|
||||
rm -rf awx/job_output
|
||||
rm -rf reports
|
||||
rm -f awx/awx_test.sqlite3*
|
||||
rm -rf requirements/vendor
|
||||
rm -rf tmp
|
||||
rm -rf $(I18N_FLAG_FILE)
|
||||
mkdir tmp
|
||||
|
||||
clean-api:
|
||||
rm -rf build $(NAME)-$(VERSION) *.egg-info
|
||||
find . -type f -regex ".*\.py[co]$$" -delete
|
||||
find . -type d -name "__pycache__" -delete
|
||||
rm -f awx/awx_test.sqlite3*
|
||||
rm -rf requirements/vendor
|
||||
|
||||
# convenience target to assert environment variables are defined
|
||||
guard-%:
|
||||
@@ -186,7 +189,7 @@ requirements_awx: virtualenv_awx
|
||||
cat requirements/requirements.txt requirements/requirements_git.txt | $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
|
||||
fi
|
||||
echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
|
||||
#$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt
|
||||
$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt
|
||||
|
||||
requirements_awx_dev:
|
||||
$(VENV_BASE)/awx/bin/pip install -r requirements/requirements_dev.txt
|
||||
@@ -375,6 +378,31 @@ test:
|
||||
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py2,py3
|
||||
awx-manage check_migrations --dry-run --check -n 'vNNN_missing_migration_file'
|
||||
|
||||
prepare_collection_venv:
|
||||
rm -rf $(COLLECTION_VENV)
|
||||
mkdir $(COLLECTION_VENV)
|
||||
ln -s /usr/lib/python2.7/site-packages/ansible $(COLLECTION_VENV)/ansible
|
||||
$(VENV_BASE)/awx/bin/pip install --target=$(COLLECTION_VENV) git+https://github.com/ansible/tower-cli.git
|
||||
|
||||
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
|
||||
COLLECTION_PACKAGE ?= awx
|
||||
COLLECTION_NAMESPACE ?= awx
|
||||
|
||||
test_collection:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
PYTHONPATH=$(COLLECTION_VENV):/awx_devel/awx_collection:$PYTHONPATH py.test $(COLLECTION_TEST_DIRS)
|
||||
|
||||
flake8_collection:
|
||||
flake8 awx_collection/ # Different settings, in main exclude list
|
||||
|
||||
test_collection_all: prepare_collection_venv test_collection flake8_collection
|
||||
|
||||
build_collection:
|
||||
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
|
||||
ansible-galaxy collection build awx_collection --output-path=awx_collection
|
||||
|
||||
test_unit:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
@@ -516,6 +544,12 @@ jshint: $(UI_DEPS_FLAG_FILE)
|
||||
$(NPM_BIN) run --prefix awx/ui jshint
|
||||
$(NPM_BIN) run --prefix awx/ui lint
|
||||
|
||||
ui-zuul-lint-and-test: $(UI_DEPS_FLAG_FILE)
|
||||
$(NPM_BIN) run --prefix awx/ui jshint
|
||||
$(NPM_BIN) run --prefix awx/ui lint
|
||||
$(NPM_BIN) --prefix awx/ui run test:ci
|
||||
$(NPM_BIN) --prefix awx/ui run unit
|
||||
|
||||
# END UI TASKS
|
||||
# --------------------------------------
|
||||
|
||||
@@ -531,6 +565,12 @@ ui-next-test:
|
||||
$(NPM_BIN) --prefix awx/ui_next install
|
||||
$(NPM_BIN) run --prefix awx/ui_next test
|
||||
|
||||
ui-next-zuul-lint-and-test:
|
||||
$(NPM_BIN) --prefix awx/ui_next install
|
||||
$(NPM_BIN) run --prefix awx/ui_next lint
|
||||
$(NPM_BIN) run --prefix awx/ui_next prettier-check
|
||||
$(NPM_BIN) run --prefix awx/ui_next test
|
||||
|
||||
# END UI NEXT TASKS
|
||||
# --------------------------------------
|
||||
|
||||
@@ -648,7 +688,7 @@ clean-elk:
|
||||
docker rm tools_kibana_1
|
||||
|
||||
psql-container:
|
||||
docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
|
||||
docker run -it --net tools_default --rm postgres:10 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
|
||||
|
||||
VERSION:
|
||||
@echo "awx: $(VERSION)"
|
||||
|
||||
@@ -82,6 +82,16 @@ def find_commands(management_dir):
|
||||
return commands
|
||||
|
||||
|
||||
def oauth2_getattribute(self, attr):
|
||||
# Custom method to override
|
||||
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
|
||||
from django.conf import settings
|
||||
val = settings.OAUTH2_PROVIDER.get(attr)
|
||||
if val is None:
|
||||
val = object.__getattribute__(self, attr)
|
||||
return val
|
||||
|
||||
|
||||
def prepare_env():
|
||||
# Update the default settings environment variable based on current mode.
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
|
||||
@@ -93,6 +103,12 @@ def prepare_env():
|
||||
# Monkeypatch Django find_commands to also work with .pyc files.
|
||||
import django.core.management
|
||||
django.core.management.find_commands = find_commands
|
||||
|
||||
# Monkeypatch Oauth2 toolkit settings class to check for settings
|
||||
# in django.conf settings each time, not just once during import
|
||||
import oauth2_provider.settings
|
||||
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
|
||||
|
||||
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
|
||||
# database settings to use when management command is run as an external
|
||||
# program via unit tests.
|
||||
|
||||
@@ -38,12 +38,15 @@ register(
|
||||
'OAUTH2_PROVIDER',
|
||||
field_class=OAuth2ProviderField,
|
||||
default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
|
||||
'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600},
|
||||
'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
|
||||
'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS},
|
||||
label=_('OAuth 2 Timeout Settings'),
|
||||
help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
|
||||
'`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
|
||||
'of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
|
||||
'authorization codes in the number of seconds.'),
|
||||
'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
|
||||
'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
|
||||
'the duration of refresh tokens, after expired access tokens, '
|
||||
'in the number of seconds.'),
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
|
||||
@@ -80,7 +80,7 @@ class OAuth2ProviderField(fields.DictField):
|
||||
default_error_messages = {
|
||||
'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
|
||||
}
|
||||
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS'}
|
||||
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
|
||||
child = fields.IntegerField(min_value=1)
|
||||
|
||||
def to_internal_value(self, data):
|
||||
|
||||
@@ -126,7 +126,7 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
'''
|
||||
|
||||
RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
|
||||
'search', 'type', 'host_filter')
|
||||
'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
|
||||
|
||||
SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
|
||||
'startswith', 'istartswith', 'endswith', 'iendswith',
|
||||
|
||||
@@ -92,7 +92,7 @@ class LoggedLoginView(auth_views.LoginView):
|
||||
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
||||
current_user = getattr(request, 'user', None)
|
||||
if request.user.is_authenticated:
|
||||
logger.info(smart_text(u"User {} logged in.".format(self.request.user.username)))
|
||||
logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None))))
|
||||
ret.set_cookie('userLoggedIn', 'true')
|
||||
current_user = UserSerializer(self.request.user)
|
||||
current_user = smart_text(JSONRenderer().render(current_user.data))
|
||||
@@ -205,6 +205,9 @@ class APIView(views.APIView):
|
||||
response['X-API-Query-Count'] = len(q_times)
|
||||
response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)
|
||||
|
||||
if getattr(self, 'deprecated', False):
|
||||
response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."' # noqa
|
||||
|
||||
return response
|
||||
|
||||
def get_authenticate_header(self, request):
|
||||
@@ -489,9 +492,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
qs = self.request.user.get_queryset(self.model).distinct()
|
||||
sublist_qs = getattrd(parent, self.relationship).distinct()
|
||||
sublist_qs = self.get_sublist_queryset(parent)
|
||||
return qs & sublist_qs
|
||||
|
||||
def get_sublist_queryset(self, parent):
|
||||
return getattrd(parent, self.relationship).distinct()
|
||||
|
||||
|
||||
class DestroyAPIView(generics.DestroyAPIView):
|
||||
|
||||
|
||||
@@ -20,8 +20,9 @@ from rest_framework.fields import JSONField as DRFJSONField
|
||||
from rest_framework.request import clone_request
|
||||
|
||||
# AWX
|
||||
from awx.main.fields import JSONField
|
||||
from awx.main.fields import JSONField, ImplicitRoleField
|
||||
from awx.main.models import InventorySource, NotificationTemplate
|
||||
from awx.main.scheduler.kubernetes import PodManager
|
||||
|
||||
|
||||
class Metadata(metadata.SimpleMetadata):
|
||||
@@ -200,6 +201,9 @@ class Metadata(metadata.SimpleMetadata):
|
||||
if not isinstance(meta, dict):
|
||||
continue
|
||||
|
||||
if field == "pod_spec_override":
|
||||
meta['default'] = PodManager().pod_definition
|
||||
|
||||
# Add type choices if available from the serializer.
|
||||
if field == 'type' and hasattr(serializer, 'get_type_choices'):
|
||||
meta['choices'] = serializer.get_type_choices()
|
||||
@@ -252,6 +256,16 @@ class Metadata(metadata.SimpleMetadata):
|
||||
if getattr(view, 'related_search_fields', None):
|
||||
metadata['related_search_fields'] = view.related_search_fields
|
||||
|
||||
# include role names in metadata
|
||||
roles = []
|
||||
model = getattr(view, 'model', None)
|
||||
if model:
|
||||
for field in model._meta.get_fields():
|
||||
if type(field) is ImplicitRoleField:
|
||||
roles.append(field.name)
|
||||
if len(roles) > 0:
|
||||
metadata['object_roles'] = roles
|
||||
|
||||
from rest_framework import generics
|
||||
if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
|
||||
metadata['max_page_size'] = view.paginator.max_page_size
|
||||
|
||||
@@ -3,14 +3,28 @@
|
||||
|
||||
# Django REST Framework
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Paginator as DjangoPaginator
|
||||
from rest_framework import pagination
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.utils.urls import replace_query_param
|
||||
|
||||
|
||||
class DisabledPaginator(DjangoPaginator):
|
||||
|
||||
@property
|
||||
def num_pages(self):
|
||||
return 1
|
||||
|
||||
@property
|
||||
def count(self):
|
||||
return 200
|
||||
|
||||
|
||||
class Pagination(pagination.PageNumberPagination):
|
||||
|
||||
page_size_query_param = 'page_size'
|
||||
max_page_size = settings.MAX_PAGE_SIZE
|
||||
count_disabled = False
|
||||
|
||||
def get_next_link(self):
|
||||
if not self.page.has_next():
|
||||
@@ -39,3 +53,17 @@ class Pagination(pagination.PageNumberPagination):
|
||||
for pl in context['page_links']]
|
||||
|
||||
return context
|
||||
|
||||
def paginate_queryset(self, queryset, request, **kwargs):
|
||||
self.count_disabled = 'count_disabled' in request.query_params
|
||||
try:
|
||||
if self.count_disabled:
|
||||
self.django_paginator_class = DisabledPaginator
|
||||
return super(Pagination, self).paginate_queryset(queryset, request, **kwargs)
|
||||
finally:
|
||||
self.django_paginator_class = DjangoPaginator
|
||||
|
||||
def get_paginated_response(self, data):
|
||||
if self.count_disabled:
|
||||
return Response({'results': data})
|
||||
return super(Pagination, self).get_paginated_response(data)
|
||||
|
||||
@@ -249,3 +249,8 @@ class InstanceGroupTowerPermission(ModelAccessPermission):
|
||||
if request.method == 'DELETE' and obj.name == "tower":
|
||||
return False
|
||||
return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj)
|
||||
|
||||
|
||||
class WebhookKeyPermission(permissions.BasePermission):
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return request.user.can_access(view.model, 'admin', obj, request.data)
|
||||
|
||||
@@ -45,7 +45,6 @@ from polymorphic.models import PolymorphicModel
|
||||
from awx.main.access import get_user_capabilities
|
||||
from awx.main.constants import (
|
||||
SCHEDULEABLE_PROVIDERS,
|
||||
ANSI_SGR_PATTERN,
|
||||
ACTIVE_STATES,
|
||||
CENSOR_VALUE,
|
||||
)
|
||||
@@ -70,7 +69,8 @@ from awx.main.utils import (
|
||||
get_type_for_model, get_model_for_type,
|
||||
camelcase_to_underscore, getattrd, parse_yaml_or_json,
|
||||
has_model_field_prefetched, extract_ansible_vars, encrypt_dict,
|
||||
prefetch_page_capabilities, get_external_account)
|
||||
prefetch_page_capabilities, get_external_account, truncate_stdout,
|
||||
)
|
||||
from awx.main.utils.filters import SmartFilter
|
||||
from awx.main.redact import UriCleaner, REPLACE_STR
|
||||
|
||||
@@ -116,7 +116,7 @@ SUMMARIZABLE_FK_FIELDS = {
|
||||
'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
|
||||
'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
|
||||
'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
|
||||
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
|
||||
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type'),
|
||||
'job_template': DEFAULT_SUMMARY_FIELDS,
|
||||
'workflow_job_template': DEFAULT_SUMMARY_FIELDS,
|
||||
@@ -135,10 +135,12 @@ SUMMARIZABLE_FK_FIELDS = {
|
||||
'source_script': ('name', 'description'),
|
||||
'role': ('id', 'role_field'),
|
||||
'notification_template': DEFAULT_SUMMARY_FIELDS,
|
||||
'instance_group': {'id', 'name', 'controller_id'},
|
||||
'instance_group': ('id', 'name', 'controller_id', 'is_containerized'),
|
||||
'insights_credential': DEFAULT_SUMMARY_FIELDS,
|
||||
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||
'webhook_credential': DEFAULT_SUMMARY_FIELDS,
|
||||
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
|
||||
}
|
||||
|
||||
|
||||
@@ -1261,6 +1263,7 @@ class OrganizationSerializer(BaseSerializer):
|
||||
notification_templates_started = self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_success = self.reverse('api:organization_notification_templates_success_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_error = self.reverse('api:organization_notification_templates_error_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_approvals = self.reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
|
||||
object_roles = self.reverse('api:organization_object_roles_list', kwargs={'pk': obj.pk}),
|
||||
access_list = self.reverse('api:organization_access_list', kwargs={'pk': obj.pk}),
|
||||
instance_groups = self.reverse('api:organization_instance_groups_list', kwargs={'pk': obj.pk}),
|
||||
@@ -2513,7 +2516,7 @@ class CredentialSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Credential
|
||||
fields = ('*', 'organization', 'credential_type', 'inputs', 'kind', 'cloud')
|
||||
fields = ('*', 'organization', 'credential_type', 'inputs', 'kind', 'cloud', 'kubernetes')
|
||||
extra_kwargs = {
|
||||
'credential_type': {
|
||||
'label': _('Credential Type'),
|
||||
@@ -2825,6 +2828,25 @@ class JobTemplateMixin(object):
|
||||
d['recent_jobs'] = self._recent_jobs(obj)
|
||||
return d
|
||||
|
||||
def validate(self, attrs):
|
||||
webhook_service = attrs.get('webhook_service', getattr(self.instance, 'webhook_service', None))
|
||||
webhook_credential = attrs.get('webhook_credential', getattr(self.instance, 'webhook_credential', None))
|
||||
|
||||
if webhook_credential:
|
||||
if webhook_credential.credential_type.kind != 'token':
|
||||
raise serializers.ValidationError({
|
||||
'webhook_credential': _("Must be a Personal Access Token."),
|
||||
})
|
||||
|
||||
msg = {'webhook_credential': _("Must match the selected webhook service.")}
|
||||
if webhook_service:
|
||||
if webhook_credential.credential_type.namespace != '{}_token'.format(webhook_service):
|
||||
raise serializers.ValidationError(msg)
|
||||
else:
|
||||
raise serializers.ValidationError(msg)
|
||||
|
||||
return super().validate(attrs)
|
||||
|
||||
|
||||
class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobOptionsSerializer):
|
||||
show_capabilities = ['start', 'schedule', 'copy', 'edit', 'delete']
|
||||
@@ -2837,30 +2859,39 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
|
||||
class Meta:
|
||||
model = JobTemplate
|
||||
fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
|
||||
'ask_limit_on_launch', 'ask_tags_on_launch',
|
||||
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
|
||||
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
|
||||
'allow_simultaneous', 'custom_virtualenv', 'job_slice_count')
|
||||
fields = (
|
||||
'*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch',
|
||||
'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
|
||||
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
|
||||
'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
|
||||
'become_enabled', 'diff_mode', 'allow_simultaneous', 'custom_virtualenv',
|
||||
'job_slice_count', 'webhook_service', 'webhook_credential',
|
||||
)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(JobTemplateSerializer, self).get_related(obj)
|
||||
res.update(dict(
|
||||
jobs = self.reverse('api:job_template_jobs_list', kwargs={'pk': obj.pk}),
|
||||
schedules = self.reverse('api:job_template_schedules_list', kwargs={'pk': obj.pk}),
|
||||
activity_stream = self.reverse('api:job_template_activity_stream_list', kwargs={'pk': obj.pk}),
|
||||
launch = self.reverse('api:job_template_launch', kwargs={'pk': obj.pk}),
|
||||
notification_templates_started = self.reverse('api:job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_success = self.reverse('api:job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_error = self.reverse('api:job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
|
||||
access_list = self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
|
||||
survey_spec = self.reverse('api:job_template_survey_spec', kwargs={'pk': obj.pk}),
|
||||
labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
|
||||
object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
|
||||
instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
|
||||
slice_workflow_jobs = self.reverse('api:job_template_slice_workflow_jobs_list', kwargs={'pk': obj.pk}),
|
||||
copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
|
||||
))
|
||||
res.update(
|
||||
jobs=self.reverse('api:job_template_jobs_list', kwargs={'pk': obj.pk}),
|
||||
schedules=self.reverse('api:job_template_schedules_list', kwargs={'pk': obj.pk}),
|
||||
activity_stream=self.reverse('api:job_template_activity_stream_list', kwargs={'pk': obj.pk}),
|
||||
launch=self.reverse('api:job_template_launch', kwargs={'pk': obj.pk}),
|
||||
webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk}),
|
||||
webhook_receiver=(
|
||||
self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
|
||||
kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk})
|
||||
if obj.webhook_service else ''
|
||||
),
|
||||
notification_templates_started=self.reverse('api:job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_success=self.reverse('api:job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_error=self.reverse('api:job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
|
||||
access_list=self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
|
||||
survey_spec=self.reverse('api:job_template_survey_spec', kwargs={'pk': obj.pk}),
|
||||
labels=self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
|
||||
object_roles=self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
|
||||
instance_groups=self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
|
||||
slice_workflow_jobs=self.reverse('api:job_template_slice_workflow_jobs_list', kwargs={'pk': obj.pk}),
|
||||
copy=self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
|
||||
)
|
||||
if obj.host_config_key:
|
||||
res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
|
||||
return res
|
||||
@@ -2888,7 +2919,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
def validate_extra_vars(self, value):
|
||||
return vars_validate_or_raise(value)
|
||||
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
summary_fields = super(JobTemplateSerializer, self).get_summary_fields(obj)
|
||||
all_creds = []
|
||||
@@ -2929,9 +2959,11 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Job
|
||||
fields = ('*', 'job_template', 'passwords_needed_to_start',
|
||||
'allow_simultaneous', 'artifacts', 'scm_revision',
|
||||
'instance_group', 'diff_mode', 'job_slice_number', 'job_slice_count')
|
||||
fields = (
|
||||
'*', 'job_template', 'passwords_needed_to_start', 'allow_simultaneous',
|
||||
'artifacts', 'scm_revision', 'instance_group', 'diff_mode', 'job_slice_number',
|
||||
'job_slice_count', 'webhook_service', 'webhook_credential', 'webhook_guid',
|
||||
)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(JobSerializer, self).get_related(obj)
|
||||
@@ -3314,29 +3346,42 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
||||
'admin', 'execute',
|
||||
{'copy': 'organization.workflow_admin'}
|
||||
]
|
||||
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
|
||||
'ask_variables_on_launch', 'inventory', 'ask_inventory_on_launch',)
|
||||
fields = (
|
||||
'*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
|
||||
'ask_variables_on_launch', 'inventory', 'limit', 'scm_branch',
|
||||
'ask_inventory_on_launch', 'ask_scm_branch_on_launch', 'ask_limit_on_launch',
|
||||
'webhook_service', 'webhook_credential',
|
||||
)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
|
||||
res.update(dict(
|
||||
res.update(
|
||||
workflow_jobs = self.reverse('api:workflow_job_template_jobs_list', kwargs={'pk': obj.pk}),
|
||||
schedules = self.reverse('api:workflow_job_template_schedules_list', kwargs={'pk': obj.pk}),
|
||||
launch = self.reverse('api:workflow_job_template_launch', kwargs={'pk': obj.pk}),
|
||||
webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk}),
|
||||
webhook_receiver=(
|
||||
self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
|
||||
kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk})
|
||||
if obj.webhook_service else ''
|
||||
),
|
||||
workflow_nodes = self.reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': obj.pk}),
|
||||
labels = self.reverse('api:workflow_job_template_label_list', kwargs={'pk': obj.pk}),
|
||||
activity_stream = self.reverse('api:workflow_job_template_activity_stream_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_started = self.reverse('api:workflow_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_success = self.reverse('api:workflow_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_error = self.reverse('api:workflow_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
|
||||
notification_templates_approvals = self.reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
|
||||
access_list = self.reverse('api:workflow_job_template_access_list', kwargs={'pk': obj.pk}),
|
||||
object_roles = self.reverse('api:workflow_job_template_object_roles_list', kwargs={'pk': obj.pk}),
|
||||
survey_spec = self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
|
||||
copy = self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
|
||||
))
|
||||
)
|
||||
if obj.organization:
|
||||
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
||||
return res
|
||||
@@ -3344,6 +3389,22 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
||||
def validate_extra_vars(self, value):
|
||||
return vars_validate_or_raise(value)
|
||||
|
||||
def validate(self, attrs):
|
||||
attrs = super(WorkflowJobTemplateSerializer, self).validate(attrs)
|
||||
|
||||
# process char_prompts, these are not direct fields on the model
|
||||
mock_obj = self.Meta.model()
|
||||
for field_name in ('scm_branch', 'limit'):
|
||||
if field_name in attrs:
|
||||
setattr(mock_obj, field_name, attrs[field_name])
|
||||
attrs.pop(field_name)
|
||||
|
||||
# Model `.save` needs the container dict, not the pseudo fields
|
||||
if mock_obj.char_prompts:
|
||||
attrs['char_prompts'] = mock_obj.char_prompts
|
||||
|
||||
return attrs
|
||||
|
||||
|
||||
class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
|
||||
'''
|
||||
@@ -3356,13 +3417,16 @@ class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
|
||||
|
||||
|
||||
class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
||||
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJob
|
||||
fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',
|
||||
'job_template', 'is_sliced_job',
|
||||
'-execution_node', '-event_processing_finished', '-controller_node',
|
||||
'inventory',)
|
||||
fields = (
|
||||
'*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template',
|
||||
'is_sliced_job', '-execution_node', '-event_processing_finished', '-controller_node',
|
||||
'inventory', 'limit', 'scm_branch', 'webhook_service', 'webhook_credential', 'webhook_guid',
|
||||
)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(WorkflowJobSerializer, self).get_related(obj)
|
||||
@@ -3438,6 +3502,8 @@ class WorkflowApprovalSerializer(UnifiedJobSerializer):
|
||||
kwargs={'pk': obj.workflow_approval_template.pk})
|
||||
res['approve'] = self.reverse('api:workflow_approval_approve', kwargs={'pk': obj.pk})
|
||||
res['deny'] = self.reverse('api:workflow_approval_deny', kwargs={'pk': obj.pk})
|
||||
if obj.approved_or_denied_by:
|
||||
res['approved_or_denied_by'] = self.reverse('api:user_detail', kwargs={'pk': obj.approved_or_denied_by.pk})
|
||||
return res
|
||||
|
||||
|
||||
@@ -3469,7 +3535,7 @@ class WorkflowApprovalTemplateSerializer(UnifiedJobTemplateSerializer):
|
||||
if 'last_job' in res:
|
||||
del res['last_job']
|
||||
|
||||
res.update(dict(jobs = self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}),))
|
||||
res.update(jobs = self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}))
|
||||
return res
|
||||
|
||||
|
||||
@@ -3596,7 +3662,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
|
||||
# Model `.save` needs the container dict, not the psuedo fields
|
||||
# Model `.save` needs the container dict, not the pseudo fields
|
||||
if mock_obj.char_prompts:
|
||||
attrs['char_prompts'] = mock_obj.char_prompts
|
||||
|
||||
@@ -3788,25 +3854,17 @@ class JobEventSerializer(BaseSerializer):
|
||||
return d
|
||||
|
||||
def to_representation(self, obj):
|
||||
ret = super(JobEventSerializer, self).to_representation(obj)
|
||||
# Show full stdout for event detail view, truncate only for list view.
|
||||
if hasattr(self.context.get('view', None), 'retrieve'):
|
||||
return ret
|
||||
data = super(JobEventSerializer, self).to_representation(obj)
|
||||
# Show full stdout for playbook_on_* events.
|
||||
if obj and obj.event.startswith('playbook_on'):
|
||||
return ret
|
||||
return data
|
||||
# If the view logic says to not trunctate (request was to the detail view or a param was used)
|
||||
if self.context.get('no_truncate', False):
|
||||
return data
|
||||
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
|
||||
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
|
||||
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
|
||||
set_count = 0
|
||||
reset_count = 0
|
||||
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
|
||||
if m.string[m.start():m.end()] == u'\u001b[0m':
|
||||
reset_count += 1
|
||||
else:
|
||||
set_count += 1
|
||||
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
|
||||
return ret
|
||||
if 'stdout' in data:
|
||||
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
|
||||
return data
|
||||
|
||||
|
||||
class JobEventWebSocketSerializer(JobEventSerializer):
|
||||
@@ -3901,22 +3959,14 @@ class AdHocCommandEventSerializer(BaseSerializer):
|
||||
return res
|
||||
|
||||
def to_representation(self, obj):
|
||||
ret = super(AdHocCommandEventSerializer, self).to_representation(obj)
|
||||
# Show full stdout for event detail view, truncate only for list view.
|
||||
if hasattr(self.context.get('view', None), 'retrieve'):
|
||||
return ret
|
||||
data = super(AdHocCommandEventSerializer, self).to_representation(obj)
|
||||
# If the view logic says to not trunctate (request was to the detail view or a param was used)
|
||||
if self.context.get('no_truncate', False):
|
||||
return data
|
||||
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
|
||||
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
|
||||
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
|
||||
set_count = 0
|
||||
reset_count = 0
|
||||
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
|
||||
if m.string[m.start():m.end()] == u'\u001b[0m':
|
||||
reset_count += 1
|
||||
else:
|
||||
set_count += 1
|
||||
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
|
||||
return ret
|
||||
if 'stdout' in data:
|
||||
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
|
||||
return data
|
||||
|
||||
|
||||
class AdHocCommandEventWebSocketSerializer(AdHocCommandEventSerializer):
|
||||
@@ -4180,12 +4230,16 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
|
||||
queryset=Inventory.objects.all(),
|
||||
required=False, write_only=True
|
||||
)
|
||||
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
workflow_job_template_data = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('ask_inventory_on_launch', 'can_start_without_user_input', 'defaults', 'extra_vars',
|
||||
'inventory', 'survey_enabled', 'variables_needed_to_start',
|
||||
fields = ('ask_inventory_on_launch', 'ask_limit_on_launch', 'ask_scm_branch_on_launch',
|
||||
'can_start_without_user_input', 'defaults', 'extra_vars',
|
||||
'inventory', 'limit', 'scm_branch',
|
||||
'survey_enabled', 'variables_needed_to_start',
|
||||
'node_templates_missing', 'node_prompts_rejected',
|
||||
'workflow_job_template_data', 'survey_enabled', 'ask_variables_on_launch')
|
||||
read_only_fields = ('ask_inventory_on_launch', 'ask_variables_on_launch')
|
||||
@@ -4225,9 +4279,14 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
|
||||
|
||||
WFJT_extra_vars = template.extra_vars
|
||||
WFJT_inventory = template.inventory
|
||||
WFJT_limit = template.limit
|
||||
WFJT_scm_branch = template.scm_branch
|
||||
super(WorkflowJobLaunchSerializer, self).validate(attrs)
|
||||
template.extra_vars = WFJT_extra_vars
|
||||
template.inventory = WFJT_inventory
|
||||
template.limit = WFJT_limit
|
||||
template.scm_branch = WFJT_scm_branch
|
||||
|
||||
return accepted
|
||||
|
||||
|
||||
@@ -4347,6 +4406,8 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
for event in messages:
|
||||
if not messages[event]:
|
||||
continue
|
||||
if not isinstance(messages[event], dict):
|
||||
continue
|
||||
body = messages[event].get('body', {})
|
||||
if body:
|
||||
try:
|
||||
@@ -4658,6 +4719,11 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
'Isolated groups have a designated controller group.'),
|
||||
read_only=True
|
||||
)
|
||||
is_containerized = serializers.BooleanField(
|
||||
help_text=_('Indicates whether instances in this group are containerized.'
|
||||
'Containerized groups have a designated Openshift or Kubernetes cluster.'),
|
||||
read_only=True
|
||||
)
|
||||
# NOTE: help_text is duplicated from field definitions, no obvious way of
|
||||
# both defining field details here and also getting the field's help_text
|
||||
policy_instance_percentage = serializers.IntegerField(
|
||||
@@ -4683,8 +4749,9 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
fields = ("id", "type", "url", "related", "name", "created", "modified",
|
||||
"capacity", "committed_capacity", "consumed_capacity",
|
||||
"percent_capacity_remaining", "jobs_running", "jobs_total",
|
||||
"instances", "controller", "is_controller", "is_isolated",
|
||||
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")
|
||||
"instances", "controller", "is_controller", "is_isolated", "is_containerized", "credential",
|
||||
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list",
|
||||
"pod_spec_override", "summary_fields")
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(InstanceGroupSerializer, self).get_related(obj)
|
||||
@@ -4692,6 +4759,9 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
res['instances'] = self.reverse('api:instance_group_instance_list', kwargs={'pk': obj.pk})
|
||||
if obj.controller_id:
|
||||
res['controller'] = self.reverse('api:instance_group_detail', kwargs={'pk': obj.controller_id})
|
||||
if obj.credential:
|
||||
res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential_id})
|
||||
|
||||
return res
|
||||
|
||||
def validate_policy_instance_list(self, value):
|
||||
@@ -4711,6 +4781,11 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(_('tower instance group name may not be changed.'))
|
||||
return value
|
||||
|
||||
def validate_credential(self, value):
|
||||
if value and not value.kubernetes:
|
||||
raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
|
||||
return value
|
||||
|
||||
def get_capacity_dict(self):
|
||||
# Store capacity values (globally computed) in the context
|
||||
if 'capacity_map' not in self.context:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% ifmeth GET %}
|
||||
# List Roles for a Team:
|
||||
|
||||
{% ifmeth GET %}
|
||||
Make a GET request to this resource to retrieve a list of roles associated with the selected team.
|
||||
|
||||
{% include "api/_list_common.md" %}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% ifmeth GET %}
|
||||
# List Roles for a User:
|
||||
|
||||
{% ifmeth GET %}
|
||||
Make a GET request to this resource to retrieve a list of roles associated with the selected user.
|
||||
|
||||
{% include "api/_list_common.md" %}
|
||||
|
||||
12
awx/api/templates/api/webhook_key_view.md
Normal file
12
awx/api/templates/api/webhook_key_view.md
Normal file
@@ -0,0 +1,12 @@
|
||||
Webhook Secret Key:
|
||||
|
||||
Make a GET request to this resource to obtain the secret key for a job
|
||||
template or workflow job template configured to be triggered by
|
||||
webhook events. The response will include the following fields:
|
||||
|
||||
* `webhook_key`: Secret key that needs to be copied and added to the
|
||||
webhook configuration of the service this template will be receiving
|
||||
webhook events from (string, read-only)
|
||||
|
||||
Make an empty POST request to this resource to generate a new
|
||||
replacement `webhook_key`.
|
||||
@@ -1,7 +1,7 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
JobTemplateList,
|
||||
@@ -45,6 +45,7 @@ urls = [
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
|
||||
url(r'^(?P<pk>[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -18,6 +18,7 @@ from awx.api.views import (
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesStartedList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationNotificationTemplatesApprovalList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationObjectRolesList,
|
||||
OrganizationAccessList,
|
||||
@@ -43,6 +44,8 @@ urls = [
|
||||
name='organization_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
|
||||
name='organization_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(),
|
||||
name='organization_notification_templates_approvals_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
ApiV2RootView,
|
||||
ApiV2PingView,
|
||||
ApiV2ConfigView,
|
||||
ApiV2SubscriptionView,
|
||||
AuthView,
|
||||
UserMeList,
|
||||
DashboardView,
|
||||
@@ -94,6 +95,7 @@ v2_urls = [
|
||||
url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
|
||||
url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
|
||||
url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
|
||||
url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
|
||||
url(r'^auth/$', AuthView.as_view()),
|
||||
url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
|
||||
url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
|
||||
|
||||
14
awx/api/urls/webhooks.py
Normal file
14
awx/api/urls/webhooks.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
WebhookKeyView,
|
||||
GithubWebhookReceiver,
|
||||
GitlabWebhookReceiver,
|
||||
)
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
|
||||
url(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
|
||||
url(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
|
||||
]
|
||||
@@ -1,7 +1,7 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobTemplateList,
|
||||
@@ -16,6 +16,7 @@ from awx.api.views import (
|
||||
WorkflowJobTemplateNotificationTemplatesErrorList,
|
||||
WorkflowJobTemplateNotificationTemplatesStartedList,
|
||||
WorkflowJobTemplateNotificationTemplatesSuccessList,
|
||||
WorkflowJobTemplateNotificationTemplatesApprovalList,
|
||||
WorkflowJobTemplateAccessList,
|
||||
WorkflowJobTemplateObjectRolesList,
|
||||
WorkflowJobTemplateLabelList,
|
||||
@@ -38,9 +39,12 @@ urls = [
|
||||
name='workflow_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
|
||||
name='workflow_job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
|
||||
name='workflow_job_template_notification_templates_approvals_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -119,6 +119,7 @@ from awx.api.views.organization import ( # noqa
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesStartedList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationNotificationTemplatesApprovalList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationAccessList,
|
||||
OrganizationObjectRolesList,
|
||||
@@ -147,6 +148,12 @@ from awx.api.views.root import ( # noqa
|
||||
ApiV2RootView,
|
||||
ApiV2PingView,
|
||||
ApiV2ConfigView,
|
||||
ApiV2SubscriptionView,
|
||||
)
|
||||
from awx.api.views.webhooks import ( # noqa
|
||||
WebhookKeyView,
|
||||
GithubWebhookReceiver,
|
||||
GitlabWebhookReceiver,
|
||||
)
|
||||
|
||||
|
||||
@@ -245,13 +252,6 @@ class DashboardView(APIView):
|
||||
'total': hg_projects.count(),
|
||||
'failed': hg_failed_projects.count()}
|
||||
|
||||
user_jobs = get_user_queryset(request.user, models.Job)
|
||||
user_failed_jobs = user_jobs.filter(failed=True)
|
||||
data['jobs'] = {'url': reverse('api:job_list', request=request),
|
||||
'failure_url': reverse('api:job_list', request=request) + "?failed=True",
|
||||
'total': user_jobs.count(),
|
||||
'failed': user_failed_jobs.count()}
|
||||
|
||||
user_list = get_user_queryset(request.user, models.User)
|
||||
team_list = get_user_queryset(request.user, models.Team)
|
||||
credential_list = get_user_queryset(request.user, models.Credential)
|
||||
@@ -2568,10 +2568,34 @@ class JobTemplateSurveySpec(GenericAPIView):
|
||||
return Response(dict(error=_(
|
||||
"The {min_or_max} limit in survey question {idx} expected to be integer."
|
||||
).format(min_or_max=key, **context)))
|
||||
if qtype in ['multiplechoice', 'multiselect'] and 'choices' not in survey_item:
|
||||
return Response(dict(error=_(
|
||||
"Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context)
|
||||
)))
|
||||
# if it's a multiselect or multiple choice, it must have coices listed
|
||||
# choices and defualts must come in as strings seperated by /n characters.
|
||||
if qtype == 'multiselect' or qtype == 'multiplechoice':
|
||||
if 'choices' in survey_item:
|
||||
if isinstance(survey_item['choices'], str):
|
||||
survey_item['choices'] = '\n'.join(choice for choice in survey_item['choices'].splitlines() if choice.strip() != '')
|
||||
else:
|
||||
return Response(dict(error=_(
|
||||
"Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context)
|
||||
)))
|
||||
# If there is a default string split it out removing extra /n characters.
|
||||
# Note: There can still be extra newline characters added in the API, these are sanitized out using .strip()
|
||||
if 'default' in survey_item:
|
||||
if isinstance(survey_item['default'], str):
|
||||
survey_item['default'] = '\n'.join(choice for choice in survey_item['default'].splitlines() if choice.strip() != '')
|
||||
list_of_defaults = survey_item['default'].splitlines()
|
||||
else:
|
||||
list_of_defaults = survey_item['default']
|
||||
if qtype == 'multiplechoice':
|
||||
# Multiplechoice types should only have 1 default.
|
||||
if len(list_of_defaults) > 1:
|
||||
return Response(dict(error=_(
|
||||
"Multiple Choice (Single Select) can only have one default value.".format(**context)
|
||||
)))
|
||||
if any(item not in survey_item['choices'] for item in list_of_defaults):
|
||||
return Response(dict(error=_(
|
||||
"Default choice must be answered from the choices listed.".format(**context)
|
||||
)))
|
||||
|
||||
# Process encryption substitution
|
||||
if ("default" in survey_item and isinstance(survey_item['default'], str) and
|
||||
@@ -3117,6 +3141,17 @@ class WorkflowJobTemplateCopy(CopyAPIView):
|
||||
data.update(messages)
|
||||
return Response(data)
|
||||
|
||||
def _build_create_dict(self, obj):
|
||||
"""Special processing of fields managed by char_prompts
|
||||
"""
|
||||
r = super(WorkflowJobTemplateCopy, self)._build_create_dict(obj)
|
||||
field_names = set(f.name for f in obj._meta.get_fields())
|
||||
for field_name, ask_field_name in obj.get_ask_mapping().items():
|
||||
if field_name in r and field_name not in field_names:
|
||||
r.setdefault('char_prompts', {})
|
||||
r['char_prompts'][field_name] = r.pop(field_name)
|
||||
return r
|
||||
|
||||
@staticmethod
|
||||
def deep_copy_permission_check_func(user, new_objs):
|
||||
for obj in new_objs:
|
||||
@@ -3145,7 +3180,6 @@ class WorkflowJobTemplateLabelList(JobTemplateLabelList):
|
||||
|
||||
class WorkflowJobTemplateLaunch(RetrieveAPIView):
|
||||
|
||||
|
||||
model = models.WorkflowJobTemplate
|
||||
obj_permission_type = 'start'
|
||||
serializer_class = serializers.WorkflowJobLaunchSerializer
|
||||
@@ -3162,10 +3196,15 @@ class WorkflowJobTemplateLaunch(RetrieveAPIView):
|
||||
extra_vars.setdefault(v, u'')
|
||||
if extra_vars:
|
||||
data['extra_vars'] = extra_vars
|
||||
if obj.ask_inventory_on_launch:
|
||||
data['inventory'] = obj.inventory_id
|
||||
else:
|
||||
data.pop('inventory', None)
|
||||
modified_ask_mapping = models.WorkflowJobTemplate.get_ask_mapping()
|
||||
modified_ask_mapping.pop('extra_vars')
|
||||
for field_name, ask_field_name in obj.get_ask_mapping().items():
|
||||
if not getattr(obj, ask_field_name):
|
||||
data.pop(field_name, None)
|
||||
elif field_name == 'inventory':
|
||||
data[field_name] = getattrd(obj, "%s.%s" % (field_name, 'id'), None)
|
||||
else:
|
||||
data[field_name] = getattr(obj, field_name)
|
||||
return data
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
@@ -3279,6 +3318,11 @@ class WorkflowJobTemplateNotificationTemplatesSuccessList(WorkflowJobTemplateNot
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class WorkflowJobTemplateNotificationTemplatesApprovalList(WorkflowJobTemplateNotificationTemplatesAnyList):
|
||||
|
||||
relationship = 'notification_templates_approvals'
|
||||
|
||||
|
||||
class WorkflowJobTemplateAccessList(ResourceAccessList):
|
||||
|
||||
model = models.User # needs to be User for AccessLists's
|
||||
@@ -3364,6 +3408,11 @@ class WorkflowJobNotificationsList(SubListAPIView):
|
||||
relationship = 'notifications'
|
||||
search_fields = ('subject', 'notification_type', 'body',)
|
||||
|
||||
def get_sublist_queryset(self, parent):
|
||||
return self.model.objects.filter(Q(unifiedjob_notifications=parent) |
|
||||
Q(unifiedjob_notifications__unified_job_node__workflow_job=parent,
|
||||
unifiedjob_notifications__workflowapproval__isnull=False)).distinct()
|
||||
|
||||
|
||||
class WorkflowJobActivityStreamList(SubListAPIView):
|
||||
|
||||
@@ -3719,12 +3768,23 @@ class JobEventList(ListAPIView):
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
if self.request.query_params.get('no_truncate'):
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class JobEventDetail(RetrieveAPIView):
|
||||
|
||||
model = models.JobEvent
|
||||
serializer_class = serializers.JobEventSerializer
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class JobEventChildrenList(SubListAPIView):
|
||||
|
||||
@@ -3953,12 +4013,23 @@ class AdHocCommandEventList(ListAPIView):
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
if self.request.query_params.get('no_truncate'):
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class AdHocCommandEventDetail(RetrieveAPIView):
|
||||
|
||||
model = models.AdHocCommandEvent
|
||||
serializer_class = serializers.AdHocCommandEventSerializer
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context.update(no_truncate=True)
|
||||
return context
|
||||
|
||||
|
||||
class BaseAdHocCommandEventsList(SubListAPIView):
|
||||
|
||||
|
||||
@@ -70,12 +70,16 @@ class InventoryUpdateEventsList(SubListAPIView):
|
||||
|
||||
class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
@@ -92,6 +96,8 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = CustomInventoryScript
|
||||
@@ -105,6 +111,8 @@ class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
class InventoryScriptCopy(CopyAPIView):
|
||||
|
||||
deprecated = True
|
||||
|
||||
model = CustomInventoryScript
|
||||
copy_return_serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
@@ -195,6 +195,11 @@ class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTempl
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
|
||||
|
||||
relationship = 'notification_templates_approvals'
|
||||
|
||||
|
||||
class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
|
||||
@@ -17,6 +17,8 @@ from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
import requests
|
||||
|
||||
from awx.api.generics import APIView
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.utils import (
|
||||
@@ -169,6 +171,45 @@ class ApiV2PingView(APIView):
|
||||
return Response(response)
|
||||
|
||||
|
||||
class ApiV2SubscriptionView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
name = _('Configuration')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV2SubscriptionView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
def post(self, request):
|
||||
from awx.main.utils.common import get_licenser
|
||||
data = request.data.copy()
|
||||
if data.get('rh_password') == '$encrypted$':
|
||||
data['rh_password'] = settings.REDHAT_PASSWORD
|
||||
try:
|
||||
user, pw = data.get('rh_username'), data.get('rh_password')
|
||||
validated = get_licenser().validate_rh(user, pw)
|
||||
if user:
|
||||
settings.REDHAT_USERNAME = data['rh_username']
|
||||
if pw:
|
||||
settings.REDHAT_PASSWORD = data['rh_password']
|
||||
except Exception as exc:
|
||||
msg = _("Invalid License")
|
||||
if (
|
||||
isinstance(exc, requests.exceptions.HTTPError) and
|
||||
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
|
||||
):
|
||||
msg = _("The provided credentials are invalid (HTTP 401).")
|
||||
if isinstance(exc, (ValueError, OSError)) and exc.args:
|
||||
msg = exc.args[0]
|
||||
logger.exception(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
return Response(validated)
|
||||
|
||||
|
||||
class ApiV2ConfigView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
247
awx/api/views/webhooks.py
Normal file
247
awx/api/views/webhooks.py
Normal file
@@ -0,0 +1,247 @@
|
||||
from hashlib import sha1
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import urllib.parse
|
||||
|
||||
from django.utils.encoding import force_bytes
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
|
||||
from awx.api import serializers
|
||||
from awx.api.generics import APIView, GenericAPIView
|
||||
from awx.api.permissions import WebhookKeyPermission
|
||||
from awx.main.models import Job, JobTemplate, WorkflowJob, WorkflowJobTemplate
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.api.views.webhooks')
|
||||
|
||||
|
||||
class WebhookKeyView(GenericAPIView):
|
||||
serializer_class = serializers.EmptySerializer
|
||||
permission_classes = (WebhookKeyPermission,)
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {
|
||||
'job_templates': JobTemplate,
|
||||
'workflow_job_templates': WorkflowJobTemplate,
|
||||
}
|
||||
self.model = qs_models.get(self.kwargs['model_kwarg'])
|
||||
|
||||
return super().get_queryset()
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
|
||||
return Response({'webhook_key': obj.webhook_key})
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
obj.rotate_webhook_key()
|
||||
obj.save(update_fields=['webhook_key'])
|
||||
|
||||
return Response({'webhook_key': obj.webhook_key}, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class WebhookReceiverBase(APIView):
|
||||
lookup_url_kwarg = None
|
||||
lookup_field = 'pk'
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
authentication_classes = ()
|
||||
|
||||
ref_keys = {}
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {
|
||||
'job_templates': JobTemplate,
|
||||
'workflow_job_templates': WorkflowJobTemplate,
|
||||
}
|
||||
model = qs_models.get(self.kwargs['model_kwarg'])
|
||||
if model is None:
|
||||
raise PermissionDenied
|
||||
|
||||
return model.objects.filter(webhook_service=self.service).exclude(webhook_key='')
|
||||
|
||||
def get_object(self):
|
||||
queryset = self.get_queryset()
|
||||
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
|
||||
filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
|
||||
|
||||
obj = queryset.filter(**filter_kwargs).first()
|
||||
if obj is None:
|
||||
raise PermissionDenied
|
||||
|
||||
return obj
|
||||
|
||||
def get_event_type(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_guid(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_status_api(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_event_ref(self):
|
||||
key = self.ref_keys.get(self.get_event_type(), '')
|
||||
value = self.request.data
|
||||
for element in key.split('.'):
|
||||
try:
|
||||
if element.isdigit():
|
||||
value = value[int(element)]
|
||||
else:
|
||||
value = (value or {}).get(element)
|
||||
except Exception:
|
||||
value = None
|
||||
if value == '0000000000000000000000000000000000000000': # a deleted ref
|
||||
value = None
|
||||
return value
|
||||
|
||||
def get_signature(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def check_signature(self, obj):
|
||||
if not obj.webhook_key:
|
||||
raise PermissionDenied
|
||||
|
||||
mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
|
||||
logger.debug("header signature: %s", self.get_signature())
|
||||
logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
|
||||
if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
|
||||
raise PermissionDenied
|
||||
|
||||
@csrf_exempt
|
||||
def post(self, request, *args, **kwargs):
|
||||
# Ensure that the full contents of the request are captured for multiple uses.
|
||||
request.body
|
||||
|
||||
logger.debug(
|
||||
"headers: {}\n"
|
||||
"data: {}\n".format(request.headers, request.data)
|
||||
)
|
||||
obj = self.get_object()
|
||||
self.check_signature(obj)
|
||||
|
||||
event_type = self.get_event_type()
|
||||
event_guid = self.get_event_guid()
|
||||
event_ref = self.get_event_ref()
|
||||
status_api = self.get_event_status_api()
|
||||
|
||||
kwargs = {
|
||||
'unified_job_template_id': obj.id,
|
||||
'webhook_service': obj.webhook_service,
|
||||
'webhook_guid': event_guid,
|
||||
}
|
||||
if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
|
||||
# Short circuit if this webhook has already been received and acted upon.
|
||||
logger.debug("Webhook previously received, returning without action.")
|
||||
return Response({'message': _("Webhook previously received, aborting.")},
|
||||
status=status.HTTP_202_ACCEPTED)
|
||||
|
||||
kwargs = {
|
||||
'_eager_fields': {
|
||||
'launch_type': 'webhook',
|
||||
'webhook_service': obj.webhook_service,
|
||||
'webhook_credential': obj.webhook_credential,
|
||||
'webhook_guid': event_guid,
|
||||
},
|
||||
'extra_vars': json.dumps({
|
||||
'tower_webhook_event_type': event_type,
|
||||
'tower_webhook_event_guid': event_guid,
|
||||
'tower_webhook_event_ref': event_ref,
|
||||
'tower_webhook_status_api': status_api,
|
||||
'tower_webhook_payload': request.data,
|
||||
})
|
||||
}
|
||||
|
||||
new_job = obj.create_unified_job(**kwargs)
|
||||
new_job.signal_start()
|
||||
|
||||
return Response({'message': "Job queued."}, status=status.HTTP_202_ACCEPTED)
|
||||
|
||||
|
||||
class GithubWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'github'
|
||||
|
||||
ref_keys = {
|
||||
'pull_request': 'pull_request.head.sha',
|
||||
'pull_request_review': 'pull_request.head.sha',
|
||||
'pull_request_review_comment': 'pull_request.head.sha',
|
||||
'push': 'after',
|
||||
'release': 'release.tag_name',
|
||||
'commit_comment': 'comment.commit_id',
|
||||
'create': 'ref',
|
||||
'page_build': 'build.commit',
|
||||
}
|
||||
|
||||
def get_event_type(self):
|
||||
return self.request.META.get('HTTP_X_GITHUB_EVENT')
|
||||
|
||||
def get_event_guid(self):
|
||||
return self.request.META.get('HTTP_X_GITHUB_DELIVERY')
|
||||
|
||||
def get_event_status_api(self):
|
||||
if self.get_event_type() != 'pull_request':
|
||||
return
|
||||
return self.request.data.get('pull_request', {}).get('statuses_url')
|
||||
|
||||
def get_signature(self):
|
||||
header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
|
||||
if not header_sig:
|
||||
logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
|
||||
raise PermissionDenied
|
||||
hash_alg, signature = header_sig.split('=')
|
||||
if hash_alg != 'sha1':
|
||||
logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
|
||||
raise PermissionDenied
|
||||
return force_bytes(signature)
|
||||
|
||||
|
||||
class GitlabWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'gitlab'
|
||||
|
||||
ref_keys = {
|
||||
'Push Hook': 'checkout_sha',
|
||||
'Tag Push Hook': 'checkout_sha',
|
||||
'Merge Request Hook': 'object_attributes.last_commit.id',
|
||||
}
|
||||
|
||||
def get_event_type(self):
|
||||
return self.request.META.get('HTTP_X_GITLAB_EVENT')
|
||||
|
||||
def get_event_guid(self):
|
||||
# GitLab does not provide a unique identifier on events, so construct one.
|
||||
h = sha1()
|
||||
h.update(force_bytes(self.request.body))
|
||||
return h.hexdigest()
|
||||
|
||||
def get_event_status_api(self):
|
||||
if self.get_event_type() != 'Merge Request Hook':
|
||||
return
|
||||
project = self.request.data.get('project', {})
|
||||
repo_url = project.get('web_url')
|
||||
if not repo_url:
|
||||
return
|
||||
parsed = urllib.parse.urlparse(repo_url)
|
||||
|
||||
return "{}://{}/api/v4/projects/{}/statuses/{}".format(
|
||||
parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
|
||||
|
||||
def get_signature(self):
|
||||
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
|
||||
|
||||
def check_signature(self, obj):
|
||||
if not obj.webhook_key:
|
||||
raise PermissionDenied
|
||||
|
||||
# GitLab only returns the secret token, not an hmac hash. Use
|
||||
# the hmac `compare_digest` helper function to prevent timing
|
||||
# analysis by attackers.
|
||||
if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
|
||||
raise PermissionDenied
|
||||
@@ -10,8 +10,8 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.fields import ( # noqa
|
||||
BooleanField, CharField, ChoiceField, DictField, EmailField, IntegerField,
|
||||
ListField, NullBooleanField
|
||||
BooleanField, CharField, ChoiceField, DictField, EmailField,
|
||||
IntegerField, ListField, NullBooleanField
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.conf.fields')
|
||||
@@ -121,11 +121,14 @@ class URLField(CharField):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
schemes = kwargs.pop('schemes', None)
|
||||
regex = kwargs.pop('regex', None)
|
||||
self.allow_plain_hostname = kwargs.pop('allow_plain_hostname', False)
|
||||
super(URLField, self).__init__(**kwargs)
|
||||
validator_kwargs = dict(message=_('Enter a valid URL'))
|
||||
if schemes is not None:
|
||||
validator_kwargs['schemes'] = schemes
|
||||
if regex is not None:
|
||||
validator_kwargs['regex'] = regex
|
||||
self.validators.append(URLValidator(**validator_kwargs))
|
||||
|
||||
def to_representation(self, value):
|
||||
|
||||
@@ -317,10 +317,19 @@ class BaseAccess(object):
|
||||
validation_info['time_remaining'] = 99999999
|
||||
validation_info['grace_period_remaining'] = 99999999
|
||||
|
||||
report_violation = lambda message: logger.error(message)
|
||||
|
||||
if (
|
||||
validation_info.get('trial', False) is True or
|
||||
validation_info['instance_count'] == 10 # basic 10 license
|
||||
):
|
||||
def report_violation(message):
|
||||
raise PermissionDenied(message)
|
||||
|
||||
if check_expiration and validation_info.get('time_remaining', None) is None:
|
||||
raise PermissionDenied(_("License is missing."))
|
||||
if check_expiration and validation_info.get("grace_period_remaining") <= 0:
|
||||
raise PermissionDenied(_("License has expired."))
|
||||
elif check_expiration and validation_info.get("grace_period_remaining") <= 0:
|
||||
report_violation(_("License has expired."))
|
||||
|
||||
free_instances = validation_info.get('free_instances', 0)
|
||||
available_instances = validation_info.get('available_instances', 0)
|
||||
@@ -328,11 +337,11 @@ class BaseAccess(object):
|
||||
if add_host_name:
|
||||
host_exists = Host.objects.filter(name=add_host_name).exists()
|
||||
if not host_exists and free_instances == 0:
|
||||
raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances)
|
||||
report_violation(_("License count of %s instances has been reached.") % available_instances)
|
||||
elif not host_exists and free_instances < 0:
|
||||
raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances)
|
||||
report_violation(_("License count of %s instances has been exceeded.") % available_instances)
|
||||
elif not add_host_name and free_instances < 0:
|
||||
raise PermissionDenied(_("Host count exceeds available instances."))
|
||||
report_violation(_("Host count exceeds available instances."))
|
||||
|
||||
def check_org_host_limit(self, data, add_host_name=None):
|
||||
validation_info = get_licenser().validate()
|
||||
@@ -652,7 +661,7 @@ class UserAccess(BaseAccess):
|
||||
if obj.is_superuser and super_users.count() == 1:
|
||||
# cannot delete the last active superuser
|
||||
return False
|
||||
if self.user.is_superuser:
|
||||
if self.can_admin(obj, None, allow_orphans=True):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@@ -5,10 +5,9 @@ import os
|
||||
import os.path
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import requests
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.timezone import now, timedelta
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
@@ -81,17 +80,16 @@ def gather(dest=None, module=None, collection_type='scheduled'):
|
||||
last_run = state.last_run
|
||||
logger.debug("Last analytics run was: {}".format(last_run))
|
||||
|
||||
max_interval = now() - timedelta(days=7)
|
||||
max_interval = now() - timedelta(weeks=4)
|
||||
if last_run < max_interval or not last_run:
|
||||
last_run = max_interval
|
||||
|
||||
|
||||
if _valid_license() is False:
|
||||
logger.exception("Invalid License provided, or No License Provided")
|
||||
return "Error: Invalid License provided, or No License Provided"
|
||||
|
||||
if not settings.INSIGHTS_TRACKING_STATE:
|
||||
logger.error("Insights analytics not enabled")
|
||||
|
||||
if collection_type != 'dry-run' and not settings.INSIGHTS_TRACKING_STATE:
|
||||
logger.error("Automation Analytics not enabled. Use --dry-run to gather locally without sending.")
|
||||
return
|
||||
|
||||
if module is None:
|
||||
@@ -146,30 +144,39 @@ def gather(dest=None, module=None, collection_type='scheduled'):
|
||||
|
||||
def ship(path):
|
||||
"""
|
||||
Ship gathered metrics via the Insights agent
|
||||
Ship gathered metrics to the Insights API
|
||||
"""
|
||||
if not path:
|
||||
logger.error('Automation Analytics TAR not found')
|
||||
return
|
||||
if "Error:" in str(path):
|
||||
return
|
||||
try:
|
||||
agent = 'insights-client'
|
||||
if shutil.which(agent) is None:
|
||||
logger.error('could not find {} on PATH'.format(agent))
|
||||
return
|
||||
logger.debug('shipping analytics file: {}'.format(path))
|
||||
try:
|
||||
cmd = [
|
||||
agent, '--payload', path, '--content-type', settings.INSIGHTS_AGENT_MIME
|
||||
]
|
||||
output = smart_str(subprocess.check_output(cmd, timeout=60 * 5))
|
||||
logger.debug(output)
|
||||
# reset the `last_run` when data is shipped
|
||||
run_now = now()
|
||||
state = TowerAnalyticsState.get_solo()
|
||||
state.last_run = run_now
|
||||
state.save()
|
||||
|
||||
except subprocess.CalledProcessError:
|
||||
logger.exception('{} failure:'.format(cmd))
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.exception('{} timeout:'.format(cmd))
|
||||
url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
|
||||
if not url:
|
||||
logger.error('AUTOMATION_ANALYTICS_URL is not set')
|
||||
return
|
||||
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
|
||||
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
|
||||
if not rh_user:
|
||||
return logger.error('REDHAT_USERNAME is not set')
|
||||
if not rh_password:
|
||||
return logger.error('REDHAT_PASSWORD is not set')
|
||||
with open(path, 'rb') as f:
|
||||
files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
|
||||
response = requests.post(url,
|
||||
files=files,
|
||||
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
|
||||
auth=(rh_user, rh_password),
|
||||
timeout=(31, 31))
|
||||
if response.status_code != 202:
|
||||
return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
|
||||
response.text))
|
||||
run_now = now()
|
||||
state = TowerAnalyticsState.get_solo()
|
||||
state.last_run = run_now
|
||||
state.save()
|
||||
finally:
|
||||
# cleanup tar.gz
|
||||
os.remove(path)
|
||||
|
||||
224
awx/main/conf.py
224
awx/main/conf.py
@@ -2,12 +2,14 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from distutils.version import LooseVersion as Version
|
||||
|
||||
# Django
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import FloatField
|
||||
|
||||
# Tower
|
||||
from awx.conf import fields, register, register_validate
|
||||
@@ -153,7 +155,7 @@ register(
|
||||
register(
|
||||
'AUTOMATION_ANALYTICS_URL',
|
||||
field_class=fields.URLField,
|
||||
default='https://cloud.redhat.com',
|
||||
default='https://example.com',
|
||||
schemes=('http', 'https'),
|
||||
allow_plain_hostname=True, # Allow hostname only without TLD.
|
||||
label=_('Automation Analytics upload URL.'),
|
||||
@@ -298,6 +300,16 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ISOLATED_HOST_KEY_CHECKING',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Isolated host key checking'),
|
||||
help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
default=False
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ISOLATED_KEY_GENERATION',
|
||||
field_class=fields.BooleanField,
|
||||
@@ -335,6 +347,53 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Enable detailed resource profiling on all playbook runs'),
|
||||
help_text=_('If set, detailed resource profiling data will be collected on all jobs. '
|
||||
'This data can be gathered with `sosreport`.'), # noqa
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_CPU_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for cpu usage.'),
|
||||
help_text=_('Interval (in seconds) between polls for cpu usage. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_MEMORY_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for memory usage.'),
|
||||
help_text=_('Interval (in seconds) between polls for memory usage. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_RESOURCE_PROFILING_PID_POLL_INTERVAL',
|
||||
field_class=FloatField,
|
||||
default='0.25',
|
||||
label=_('Interval (in seconds) between polls for PID count.'),
|
||||
help_text=_('Interval (in seconds) between polls for PID count. '
|
||||
'Setting this lower than the default will affect playbook performance.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
required=False,
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_TASK_ENV',
|
||||
field_class=fields.KeyValueField,
|
||||
@@ -350,12 +409,21 @@ register(
|
||||
'INSIGHTS_TRACKING_STATE',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Gather data for Automation Insights'),
|
||||
help_text=_('Enables Tower to gather data on automation and send it to Red Hat Insights.'),
|
||||
label=_('Gather data for Automation Analytics'),
|
||||
help_text=_('Enables Tower to gather data on automation and send it to Red Hat.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
register(
|
||||
'PROJECT_UPDATE_VVV',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Run Project Updates With Higher Verbosity'),
|
||||
help_text=_('Adds the CLI -vvv flag to ansible-playbook runs of project_update.yml used for project updates.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'AWX_ROLES_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
@@ -376,6 +444,75 @@ register(
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_URL',
|
||||
field_class=fields.URLField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server URL'),
|
||||
help_text=_(
|
||||
'For organizations that run their own Galaxy service, this gives the option to specify a '
|
||||
'host as the primary galaxy server. Requirements will be downloaded from the primary if the '
|
||||
'specific role or collection is available there. If the content is not avilable in the primary, '
|
||||
'or if this field is left blank, it will default to galaxy.ansible.com.'
|
||||
),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_USERNAME',
|
||||
field_class=fields.CharField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Username'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The username to use for basic authentication against the Galaxy instance, '
|
||||
'this is mutually exclusive with PRIMARY_GALAXY_TOKEN.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_PASSWORD',
|
||||
field_class=fields.CharField,
|
||||
encrypted=True,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Password'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The password to use for basic authentication against the Galaxy instance, '
|
||||
'this is mutually exclusive with PRIMARY_GALAXY_TOKEN.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_TOKEN',
|
||||
field_class=fields.CharField,
|
||||
encrypted=True,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Server Token'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The token to use for connecting with the Galaxy instance, '
|
||||
'this is mutually exclusive with corresponding username and password settings.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'PRIMARY_GALAXY_AUTH_URL',
|
||||
field_class=fields.CharField,
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
label=_('Primary Galaxy Authentication URL'),
|
||||
help_text=_('For using a galaxy server at higher precedence than the public Ansible Galaxy. '
|
||||
'The token_endpoint of a Keycloak server.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs'
|
||||
)
|
||||
|
||||
register(
|
||||
'STDOUT_MAX_BYTES_DISPLAY',
|
||||
field_class=fields.IntegerField,
|
||||
@@ -616,6 +753,16 @@ register(
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
)
|
||||
register(
|
||||
'LOG_AGGREGATOR_AUDIT',
|
||||
field_class=fields.BooleanField,
|
||||
allow_null=True,
|
||||
default=False,
|
||||
label=_('Enabled external log aggregation auditing'),
|
||||
help_text=_('When enabled, all external logs emitted by Tower will also be written to /var/log/tower/external.log. This is an experimental setting intended to be used for debugging external log aggregation issues (and may be subject to change in the future).'), # noqa
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
)
|
||||
|
||||
|
||||
register(
|
||||
@@ -646,4 +793,75 @@ def logging_validate(serializer, attrs):
|
||||
return attrs
|
||||
|
||||
|
||||
def galaxy_validate(serializer, attrs):
|
||||
"""Ansible Galaxy config options have mutual exclusivity rules, these rules
|
||||
are enforced here on serializer validation so that users will not be able
|
||||
to save settings which obviously break all project updates.
|
||||
"""
|
||||
prefix = 'PRIMARY_GALAXY_'
|
||||
|
||||
from awx.main.constants import GALAXY_SERVER_FIELDS
|
||||
if not any('{}{}'.format(prefix, subfield.upper()) in attrs for subfield in GALAXY_SERVER_FIELDS):
|
||||
return attrs
|
||||
|
||||
def _new_value(setting_name):
|
||||
if setting_name in attrs:
|
||||
return attrs[setting_name]
|
||||
elif not serializer.instance:
|
||||
return ''
|
||||
return getattr(serializer.instance, setting_name, '')
|
||||
|
||||
galaxy_data = {}
|
||||
for subfield in GALAXY_SERVER_FIELDS:
|
||||
galaxy_data[subfield] = _new_value('{}{}'.format(prefix, subfield.upper()))
|
||||
errors = {}
|
||||
if not galaxy_data['url']:
|
||||
for k, v in galaxy_data.items():
|
||||
if v:
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Cannot provide field if PRIMARY_GALAXY_URL is not set.'
|
||||
))
|
||||
for k in GALAXY_SERVER_FIELDS:
|
||||
if galaxy_data[k]:
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
if (not serializer.instance) or (not getattr(serializer.instance, setting_name, '')):
|
||||
# new auth is applied, so check if compatible with version
|
||||
from awx.main.utils import get_ansible_version
|
||||
current_version = get_ansible_version()
|
||||
min_version = '2.9'
|
||||
if Version(current_version) < Version(min_version):
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Galaxy server settings are not available until Ansible {min_version}, '
|
||||
'you are running {current_version}.'
|
||||
).format(min_version=min_version, current_version=current_version))
|
||||
if (galaxy_data['password'] or galaxy_data['username']) and (galaxy_data['token'] or galaxy_data['auth_url']):
|
||||
for k in ('password', 'username', 'token', 'auth_url'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
if setting_name in attrs:
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(_(
|
||||
'Setting Galaxy token and authentication URL is mutually exclusive with username and password.'
|
||||
))
|
||||
if bool(galaxy_data['username']) != bool(galaxy_data['password']):
|
||||
msg = _('If authenticating via username and password, both must be provided.')
|
||||
for k in ('username', 'password'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(msg)
|
||||
if bool(galaxy_data['token']) != bool(galaxy_data['auth_url']):
|
||||
msg = _('If authenticating via token, both token and authentication URL must be provided.')
|
||||
for k in ('token', 'auth_url'):
|
||||
setting_name = '{}{}'.format(prefix, k.upper())
|
||||
errors.setdefault(setting_name, [])
|
||||
errors[setting_name].append(msg)
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
return attrs
|
||||
|
||||
|
||||
register_validate('logging', logging_validate)
|
||||
register_validate('jobs', galaxy_validate)
|
||||
|
||||
@@ -51,3 +51,7 @@ LOGGER_BLACKLIST = (
|
||||
# loggers that may be called getting logging settings
|
||||
'awx.conf'
|
||||
)
|
||||
|
||||
# these correspond to both AWX and Ansible settings to keep naming consistent
|
||||
# for instance, settings.PRIMARY_GALAXY_AUTH_URL vs env var ANSIBLE_GALAXY_SERVER_FOO_AUTH_URL
|
||||
GALAXY_SERVER_FIELDS = ('url', 'username', 'password', 'token', 'auth_url')
|
||||
|
||||
@@ -101,7 +101,7 @@ def aim_backend(**kwargs):
|
||||
|
||||
|
||||
aim_plugin = CredentialPlugin(
|
||||
'CyberArk AIM Secret Lookup',
|
||||
'CyberArk AIM Central Credential Provider Lookup',
|
||||
inputs=aim_inputs,
|
||||
backend=aim_backend
|
||||
)
|
||||
|
||||
@@ -103,6 +103,8 @@ def kv_backend(**kwargs):
|
||||
|
||||
sess = requests.Session()
|
||||
sess.headers['Authorization'] = 'Bearer {}'.format(token)
|
||||
# Compatability header for older installs of Hashicorp Vault
|
||||
sess.headers['X-Vault-Token'] = token
|
||||
|
||||
if api_version == 'v2':
|
||||
if kwargs.get('secret_version'):
|
||||
@@ -158,6 +160,8 @@ def ssh_backend(**kwargs):
|
||||
|
||||
sess = requests.Session()
|
||||
sess.headers['Authorization'] = 'Bearer {}'.format(token)
|
||||
# Compatability header for older installs of Hashicorp Vault
|
||||
sess.headers['X-Vault-Token'] = token
|
||||
# https://www.vaultproject.io/api/secret/ssh/index.html#sign-ssh-key
|
||||
request_url = '/'.join([url, secret_path, 'sign', role]).rstrip('/')
|
||||
resp = sess.post(request_url, **request_kwargs)
|
||||
|
||||
@@ -33,7 +33,11 @@ def reap(instance=None, status='failed', excluded_uuids=[]):
|
||||
'''
|
||||
Reap all jobs in waiting|running for this instance.
|
||||
'''
|
||||
me = instance or Instance.objects.me()
|
||||
me = instance
|
||||
if me is None:
|
||||
(changed, me) = Instance.objects.get_or_register()
|
||||
if changed:
|
||||
logger.info("Registered tower node '{}'".format(me.hostname))
|
||||
now = tz_now()
|
||||
workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
|
||||
jobs = UnifiedJob.objects.filter(
|
||||
|
||||
@@ -11,7 +11,9 @@ from django.conf import settings
|
||||
import ansible_runner
|
||||
|
||||
import awx
|
||||
from awx.main.utils import get_system_task_capacity
|
||||
from awx.main.utils import (
|
||||
get_system_task_capacity
|
||||
)
|
||||
from awx.main.queue import CallbackQueueDispatcher
|
||||
|
||||
logger = logging.getLogger('awx.isolated.manager')
|
||||
@@ -29,7 +31,7 @@ def set_pythonpath(venv_libdir, env):
|
||||
|
||||
class IsolatedManager(object):
|
||||
|
||||
def __init__(self, cancelled_callback=None, check_callback=None):
|
||||
def __init__(self, cancelled_callback=None, check_callback=None, pod_manager=None):
|
||||
"""
|
||||
:param cancelled_callback: a callable - which returns `True` or `False`
|
||||
- signifying if the job has been prematurely
|
||||
@@ -40,11 +42,29 @@ class IsolatedManager(object):
|
||||
self.idle_timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
self.started_at = None
|
||||
self.captured_command_artifact = False
|
||||
self.instance = None
|
||||
self.pod_manager = pod_manager
|
||||
|
||||
def build_inventory(self, hosts):
|
||||
if self.instance and self.instance.is_containerized:
|
||||
inventory = {'all': {'hosts': {}}}
|
||||
for host in hosts:
|
||||
inventory['all']['hosts'][host] = {
|
||||
"ansible_connection": "kubectl",
|
||||
"ansible_kubectl_config": self.pod_manager.kube_config
|
||||
}
|
||||
else:
|
||||
inventory = '\n'.join([
|
||||
'{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
|
||||
for host in hosts
|
||||
])
|
||||
|
||||
return inventory
|
||||
|
||||
def build_runner_params(self, hosts, verbosity=1):
|
||||
env = dict(os.environ.items())
|
||||
env['ANSIBLE_RETRY_FILES_ENABLED'] = 'False'
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = str(settings.AWX_ISOLATED_HOST_KEY_CHECKING)
|
||||
env['ANSIBLE_LIBRARY'] = os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
|
||||
set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)
|
||||
|
||||
@@ -69,17 +89,12 @@ class IsolatedManager(object):
|
||||
else:
|
||||
playbook_logger.info(runner_obj.stdout.read())
|
||||
|
||||
inventory = '\n'.join([
|
||||
'{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
|
||||
for host in hosts
|
||||
])
|
||||
|
||||
return {
|
||||
'project_dir': os.path.abspath(os.path.join(
|
||||
os.path.dirname(awx.__file__),
|
||||
'playbooks'
|
||||
)),
|
||||
'inventory': inventory,
|
||||
'inventory': self.build_inventory(hosts),
|
||||
'envvars': env,
|
||||
'finished_callback': finished_callback,
|
||||
'verbosity': verbosity,
|
||||
@@ -153,6 +168,12 @@ class IsolatedManager(object):
|
||||
runner_obj = self.run_management_playbook('run_isolated.yml',
|
||||
self.private_data_dir,
|
||||
extravars=extravars)
|
||||
|
||||
if runner_obj.status == 'failed':
|
||||
self.instance.result_traceback = runner_obj.stdout.read()
|
||||
self.instance.save(update_fields=['result_traceback'])
|
||||
return 'error', runner_obj.rc
|
||||
|
||||
return runner_obj.status, runner_obj.rc
|
||||
|
||||
def check(self, interval=None):
|
||||
@@ -175,6 +196,7 @@ class IsolatedManager(object):
|
||||
rc = None
|
||||
last_check = time.time()
|
||||
dispatcher = CallbackQueueDispatcher()
|
||||
|
||||
while status == 'failed':
|
||||
canceled = self.cancelled_callback() if self.cancelled_callback else False
|
||||
if not canceled and time.time() - last_check < interval:
|
||||
@@ -279,7 +301,6 @@ class IsolatedManager(object):
|
||||
|
||||
|
||||
def cleanup(self):
|
||||
# If the job failed for any reason, make a last-ditch effort at cleanup
|
||||
extravars = {
|
||||
'private_data_dir': self.private_data_dir,
|
||||
'cleanup_dirs': [
|
||||
@@ -393,6 +414,7 @@ class IsolatedManager(object):
|
||||
[instance.execution_node],
|
||||
verbosity=min(5, self.instance.verbosity)
|
||||
)
|
||||
|
||||
status, rc = self.dispatch(playbook, module, module_args)
|
||||
if status == 'successful':
|
||||
status, rc = self.check()
|
||||
|
||||
17
awx/main/management/commands/check_db.py
Normal file
17
awx/main/management/commands/check_db.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Checks connection to the database, and prints out connection info if not connected"""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT version()")
|
||||
version = str(cursor.fetchone()[0])
|
||||
|
||||
return "Database Version: {}".format(version)
|
||||
@@ -11,8 +11,10 @@ class Command(BaseCommand):
|
||||
help = 'Gather AWX analytics data'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--dry-run', dest='dry-run', action='store_true',
|
||||
help='Gather analytics without shipping. Works even if analytics are disabled in settings.')
|
||||
parser.add_argument('--ship', dest='ship', action='store_true',
|
||||
help='Enable to ship metrics via insights-client')
|
||||
help='Enable to ship metrics to the Red Hat Cloud')
|
||||
|
||||
def init_logging(self):
|
||||
self.logger = logging.getLogger('awx.main.analytics')
|
||||
@@ -23,9 +25,14 @@ class Command(BaseCommand):
|
||||
self.logger.propagate = False
|
||||
|
||||
def handle(self, *args, **options):
|
||||
tgz = gather(collection_type='manual')
|
||||
self.init_logging()
|
||||
opt_ship = options.get('ship')
|
||||
opt_dry_run = options.get('dry-run')
|
||||
if opt_ship and opt_dry_run:
|
||||
self.logger.error('Both --ship and --dry-run cannot be processed at the same time.')
|
||||
return
|
||||
tgz = gather(collection_type='manual' if not opt_dry_run else 'dry-run')
|
||||
if tgz:
|
||||
self.logger.debug(tgz)
|
||||
if options.get('ship'):
|
||||
if opt_ship:
|
||||
ship(tgz)
|
||||
|
||||
@@ -919,7 +919,8 @@ class Command(BaseCommand):
|
||||
new_count = Host.objects.active_count()
|
||||
if time_remaining <= 0 and not license_info.get('demo', False):
|
||||
logger.error(LICENSE_EXPIRED_MESSAGE)
|
||||
raise CommandError("License has expired!")
|
||||
if license_info.get('trial', False) is True:
|
||||
raise CommandError("License has expired!")
|
||||
# special check for tower-type inventory sources
|
||||
# but only if running the plugin
|
||||
TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
|
||||
@@ -936,7 +937,11 @@ class Command(BaseCommand):
|
||||
logger.error(DEMO_LICENSE_MESSAGE % d)
|
||||
else:
|
||||
logger.error(LICENSE_MESSAGE % d)
|
||||
raise CommandError('License count exceeded!')
|
||||
if (
|
||||
license_info.get('trial', False) is True or
|
||||
license_info['instance_count'] == 10 # basic 10 license
|
||||
):
|
||||
raise CommandError('License count exceeded!')
|
||||
|
||||
def check_org_host_limit(self):
|
||||
license_info = get_licenser().validate()
|
||||
|
||||
@@ -33,6 +33,7 @@ class Command(BaseCommand):
|
||||
]):
|
||||
ssh_key = settings.AWX_ISOLATED_PRIVATE_KEY
|
||||
env = dict(os.environ.items())
|
||||
env['ANSIBLE_HOST_KEY_CHECKING'] = str(settings.AWX_ISOLATED_HOST_KEY_CHECKING)
|
||||
set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)
|
||||
res = ansible_runner.interface.run(
|
||||
private_data_dir=path,
|
||||
|
||||
@@ -221,8 +221,9 @@ class InstanceGroupManager(models.Manager):
|
||||
elif t.status == 'running':
|
||||
# Subtract capacity from all groups that contain the instance
|
||||
if t.execution_node not in instance_ig_mapping:
|
||||
logger.warning('Detected %s running inside lost instance, '
|
||||
'may still be waiting for reaper.', t.log_format)
|
||||
if not t.is_containerized:
|
||||
logger.warning('Detected %s running inside lost instance, '
|
||||
'may still be waiting for reaper.', t.log_format)
|
||||
if t.instance_group:
|
||||
impacted_groups = [t.instance_group.name]
|
||||
else:
|
||||
|
||||
28
awx/main/migrations/0088_v360_dashboard_optimizations.py
Normal file
28
awx/main/migrations/0088_v360_dashboard_optimizations.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-10 21:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0087_v360_update_credential_injector_help_text'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='finished',
|
||||
field=models.DateTimeField(db_index=True, default=None, editable=False, help_text='The date and time the job finished execution.', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='launch_type',
|
||||
field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='created',
|
||||
field=models.DateTimeField(db_index=True, default=None, editable=False),
|
||||
),
|
||||
]
|
||||
23
awx/main/migrations/0089_v360_new_job_event_types.py
Normal file
23
awx/main/migrations/0089_v360_new_job_event_types.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 13:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0088_v360_dashboard_optimizations'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='jobevent',
|
||||
name='event',
|
||||
field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdateevent',
|
||||
name='event',
|
||||
field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
|
||||
),
|
||||
]
|
||||
59
awx/main/migrations/0090_v360_WFJT_prompts.py
Normal file
59
awx/main/migrations/0090_v360_WFJT_prompts.py
Normal file
@@ -0,0 +1,59 @@
|
||||
# Generated by Django 2.2.2 on 2019-07-23 17:56
|
||||
|
||||
import awx.main.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0089_v360_new_job_event_types'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='ask_limit_on_launch',
|
||||
field=awx.main.fields.AskForField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='ask_scm_branch_on_launch',
|
||||
field=awx.main.fields.AskForField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='char_prompts',
|
||||
field=awx.main.fields.JSONField(blank=True, default=dict),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='joblaunchconfig',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='joblaunchconfigs', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='schedule',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='schedules', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjob',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobnode',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobnodes', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Inventory'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplatenode',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplatenodes', to='main.Inventory'),
|
||||
),
|
||||
]
|
||||
28
awx/main/migrations/0091_v360_approval_node_notifications.py
Normal file
28
awx/main/migrations/0091_v360_approval_node_notifications.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-11 13:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0090_v360_WFJT_prompts'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notification_templates_approvals',
|
||||
field=models.ManyToManyField(blank=True, related_name='organization_notification_templates_for_approvals', to='main.NotificationTemplate'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='notification_templates_approvals',
|
||||
field=models.ManyToManyField(blank=True, related_name='workflowjobtemplate_notification_templates_for_approvals', to='main.NotificationTemplate'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobnode',
|
||||
name='do_not_run',
|
||||
field=models.BooleanField(default=False, help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be ran. A value of False means the node may not run.'),
|
||||
),
|
||||
]
|
||||
49
awx/main/migrations/0092_v360_webhook_mixin.py
Normal file
49
awx/main/migrations/0092_v360_webhook_mixin.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:49
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0091_v360_approval_node_notifications'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobtemplates', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_key',
|
||||
field=models.CharField(blank=True, help_text='Shared secret that the webhook service will use to sign requests', max_length=64),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_key',
|
||||
field=models.CharField(blank=True, help_text='Shared secret that the webhook service will use to sign requests', max_length=64),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='launch_type',
|
||||
field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('webhook', 'Webhook'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
|
||||
),
|
||||
]
|
||||
27
awx/main/migrations/0093_v360_personal_access_tokens.py
Normal file
27
awx/main/migrations/0093_v360_personal_access_tokens.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
from awx.main.models import CredentialType
|
||||
from awx.main.utils.common import set_current_apps
|
||||
|
||||
|
||||
def setup_tower_managed_defaults(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0092_v360_webhook_mixin'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='kind',
|
||||
field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External')], max_length=32),
|
||||
),
|
||||
migrations.RunPython(setup_tower_managed_defaults),
|
||||
]
|
||||
44
awx/main/migrations/0094_v360_webhook_mixin2.py
Normal file
44
awx/main/migrations/0094_v360_webhook_mixin2.py
Normal file
@@ -0,0 +1,44 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-12 14:52
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0093_v360_personal_access_tokens'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobs', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_guid',
|
||||
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_credential',
|
||||
field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_guid',
|
||||
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='webhook_service',
|
||||
field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-04 00:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0094_v360_webhook_mixin2'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='instance',
|
||||
name='version',
|
||||
field=models.CharField(blank=True, max_length=120),
|
||||
),
|
||||
]
|
||||
38
awx/main/migrations/0096_v360_container_groups.py
Normal file
38
awx/main/migrations/0096_v360_container_groups.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 2.2.4 on 2019-09-16 23:50
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
from awx.main.models import CredentialType
|
||||
from awx.main.utils.common import set_current_apps
|
||||
|
||||
|
||||
def create_new_credential_types(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0095_v360_increase_instance_version_length'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='instancegroup',
|
||||
name='credential',
|
||||
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instancegroups', to='main.Credential'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='instancegroup',
|
||||
name='pod_spec_override',
|
||||
field=models.TextField(blank=True, default=''),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='kind',
|
||||
field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes')], max_length=32),
|
||||
),
|
||||
migrations.RunPython(create_new_credential_types)
|
||||
]
|
||||
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-11 15:40
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('main', '0096_v360_container_groups'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowapproval',
|
||||
name='approved_or_denied_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'workflowapproval', 'model_name': 'workflowapproval', 'app_label': 'main'}(class)s_approved+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 2.2.4 on 2019-10-16 19:51
|
||||
|
||||
from django.db import migrations
|
||||
from awx.main.models import CredentialType
|
||||
|
||||
|
||||
def update_cyberark_aim_name(apps, schema_editor):
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
aim_types = apps.get_model('main', 'CredentialType').objects.filter(
|
||||
namespace='aim'
|
||||
).order_by('id')
|
||||
|
||||
if aim_types.count() == 2:
|
||||
original, renamed = aim_types.all()
|
||||
apps.get_model('main', 'Credential').objects.filter(
|
||||
credential_type_id=original.id
|
||||
).update(
|
||||
credential_type_id=renamed.id
|
||||
)
|
||||
original.delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0097_v360_workflowapproval_approved_or_denied_by'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_cyberark_aim_name)
|
||||
]
|
||||
@@ -150,6 +150,14 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
|
||||
def supports_isolation(cls):
|
||||
return True
|
||||
|
||||
@property
|
||||
def is_containerized(self):
|
||||
return bool(self.instance_group and self.instance_group.is_containerized)
|
||||
|
||||
@property
|
||||
def can_run_containerized(self):
|
||||
return True
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
return reverse('api:ad_hoc_command_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ def build_safe_env(env):
|
||||
for k, v in safe_env.items():
|
||||
if k == 'AWS_ACCESS_KEY_ID':
|
||||
continue
|
||||
elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET'):
|
||||
elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET') and not k.startswith('ANSIBLE_GALAXY_SERVER'):
|
||||
continue
|
||||
elif hidden_re.search(k):
|
||||
safe_env[k] = HIDDEN_PASSWORD
|
||||
@@ -135,6 +135,10 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
def cloud(self):
|
||||
return self.credential_type.kind == 'cloud'
|
||||
|
||||
@property
|
||||
def kubernetes(self):
|
||||
return self.credential_type.kind == 'kubernetes'
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
return reverse('api:credential_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
@@ -151,7 +155,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
@property
|
||||
def has_encrypted_ssh_key_data(self):
|
||||
try:
|
||||
ssh_key_data = decrypt_field(self, 'ssh_key_data')
|
||||
ssh_key_data = self.get_input('ssh_key_data')
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
@@ -322,8 +326,10 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
('net', _('Network')),
|
||||
('scm', _('Source Control')),
|
||||
('cloud', _('Cloud')),
|
||||
('token', _('Personal Access Token')),
|
||||
('insights', _('Insights')),
|
||||
('external', _('External')),
|
||||
('kubernetes', _('Kubernetes')),
|
||||
)
|
||||
|
||||
kind = models.CharField(
|
||||
@@ -633,9 +639,6 @@ ManagedCredentialType(
|
||||
'secret': True,
|
||||
'ask_at_runtime': True
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -667,9 +670,6 @@ ManagedCredentialType(
|
||||
'type': 'string',
|
||||
'secret': True
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -738,7 +738,6 @@ ManagedCredentialType(
|
||||
'secret': True,
|
||||
}],
|
||||
'dependencies': {
|
||||
'ssh_key_unlock': ['ssh_key_data'],
|
||||
'authorize_password': ['authorize'],
|
||||
},
|
||||
'required': ['username'],
|
||||
@@ -975,6 +974,40 @@ ManagedCredentialType(
|
||||
}
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='github_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitHub Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'token',
|
||||
'label': ugettext_noop('Token'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub')
|
||||
}],
|
||||
'required': ['token'],
|
||||
},
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='gitlab_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitLab Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'token',
|
||||
'label': ugettext_noop('Token'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab')
|
||||
}],
|
||||
'required': ['token'],
|
||||
},
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='insights',
|
||||
kind='insights',
|
||||
@@ -1090,6 +1123,38 @@ ManagedCredentialType(
|
||||
)
|
||||
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='kubernetes_bearer_token',
|
||||
kind='kubernetes',
|
||||
name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'),
|
||||
inputs={
|
||||
'fields': [{
|
||||
'id': 'host',
|
||||
'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.')
|
||||
},{
|
||||
'id': 'bearer_token',
|
||||
'label': ugettext_noop('API authentication bearer token.'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
},{
|
||||
'id': 'verify_ssl',
|
||||
'label': ugettext_noop('Verify SSL'),
|
||||
'type': 'boolean',
|
||||
'default': True,
|
||||
},{
|
||||
'id': 'ssl_ca_cert',
|
||||
'label': ugettext_noop('Certificate Authority data'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'multiline': True,
|
||||
}],
|
||||
'required': ['host', 'bearer_token'],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class CredentialInputSource(PrimordialModel):
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -83,6 +83,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
# - runner_on*
|
||||
# - playbook_on_task_start (once for each task within a play)
|
||||
# - runner_on_failed
|
||||
# - runner_on_start
|
||||
# - runner_on_ok
|
||||
# - runner_on_error (not used for v2)
|
||||
# - runner_on_skipped
|
||||
@@ -102,6 +103,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
EVENT_TYPES = [
|
||||
# (level, event, verbose name, failed)
|
||||
(3, 'runner_on_failed', _('Host Failed'), True),
|
||||
(3, 'runner_on_start', _('Host Started'), False),
|
||||
(3, 'runner_on_ok', _('Host OK'), False),
|
||||
(3, 'runner_on_error', _('Host Failure'), True),
|
||||
(3, 'runner_on_skipped', _('Host Skipped'), False),
|
||||
@@ -322,7 +324,10 @@ class BasePlaybookEvent(CreatedModifiedModel):
|
||||
kwargs.pop('created', None)
|
||||
|
||||
sanitize_event_keys(kwargs, cls.VALID_KEYS)
|
||||
workflow_job_id = kwargs.pop('workflow_job_id', None)
|
||||
job_event = cls.objects.create(**kwargs)
|
||||
if workflow_job_id:
|
||||
setattr(job_event, 'workflow_job_id', workflow_job_id)
|
||||
analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=job_event)))
|
||||
return job_event
|
||||
|
||||
@@ -394,7 +399,7 @@ class JobEvent(BasePlaybookEvent):
|
||||
An event/message logged from the callback when running a job.
|
||||
'''
|
||||
|
||||
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id']
|
||||
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id', 'workflow_job_id']
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@@ -528,7 +533,7 @@ class JobEvent(BasePlaybookEvent):
|
||||
|
||||
class ProjectUpdateEvent(BasePlaybookEvent):
|
||||
|
||||
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['project_update_id']
|
||||
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['project_update_id', 'workflow_job_id']
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@@ -614,6 +619,7 @@ class BaseCommandEvent(CreatedModifiedModel):
|
||||
kwargs.pop('created', None)
|
||||
|
||||
sanitize_event_keys(kwargs, cls.VALID_KEYS)
|
||||
kwargs.pop('workflow_job_id', None)
|
||||
event = cls.objects.create(**kwargs)
|
||||
if isinstance(event, AdHocCommandEvent):
|
||||
analytics_logger.info(
|
||||
@@ -637,7 +643,7 @@ class BaseCommandEvent(CreatedModifiedModel):
|
||||
|
||||
class AdHocCommandEvent(BaseCommandEvent):
|
||||
|
||||
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event']
|
||||
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event', 'workflow_job_id']
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@@ -745,7 +751,7 @@ class AdHocCommandEvent(BaseCommandEvent):
|
||||
|
||||
class InventoryUpdateEvent(BaseCommandEvent):
|
||||
|
||||
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['inventory_update_id']
|
||||
VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['inventory_update_id', 'workflow_job_id']
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
|
||||
@@ -18,7 +18,7 @@ from awx import __version__ as awx_application_version
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.managers import InstanceManager, InstanceGroupManager
|
||||
from awx.main.fields import JSONField
|
||||
from awx.main.models.base import BaseModel, HasEditsMixin
|
||||
from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
|
||||
from awx.main.models.unified_jobs import UnifiedJob
|
||||
from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity
|
||||
from awx.main.models.mixins import RelatedJobsMixin
|
||||
@@ -59,7 +59,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
|
||||
null=True,
|
||||
editable=False,
|
||||
)
|
||||
version = models.CharField(max_length=24, blank=True)
|
||||
version = models.CharField(max_length=120, blank=True)
|
||||
capacity = models.PositiveIntegerField(
|
||||
default=100,
|
||||
editable=False,
|
||||
@@ -176,6 +176,18 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
|
||||
null=True,
|
||||
on_delete=models.CASCADE
|
||||
)
|
||||
credential = models.ForeignKey(
|
||||
'Credential',
|
||||
related_name='%(class)ss',
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
pod_spec_override = prevent_search(models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
))
|
||||
policy_instance_percentage = models.IntegerField(
|
||||
default=0,
|
||||
help_text=_("Percentage of Instances to automatically assign to this group")
|
||||
@@ -218,6 +230,10 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
|
||||
def is_isolated(self):
|
||||
return bool(self.controller)
|
||||
|
||||
@property
|
||||
def is_containerized(self):
|
||||
return bool(self.credential and self.credential.kubernetes)
|
||||
|
||||
'''
|
||||
RelatedJobsMixin
|
||||
'''
|
||||
@@ -271,7 +287,8 @@ def schedule_policy_task():
|
||||
@receiver(post_save, sender=InstanceGroup)
|
||||
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
|
||||
if created or instance.has_policy_changes():
|
||||
schedule_policy_task()
|
||||
if not instance.is_containerized:
|
||||
schedule_policy_task()
|
||||
|
||||
|
||||
@receiver(post_save, sender=Instance)
|
||||
@@ -282,7 +299,8 @@ def on_instance_saved(sender, instance, created=False, raw=False, **kwargs):
|
||||
|
||||
@receiver(post_delete, sender=InstanceGroup)
|
||||
def on_instance_group_deleted(sender, instance, using, **kwargs):
|
||||
schedule_policy_task()
|
||||
if not instance.is_containerized:
|
||||
schedule_policy_task()
|
||||
|
||||
|
||||
@receiver(post_delete, sender=Instance)
|
||||
|
||||
@@ -1501,7 +1501,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
|
||||
@classmethod
|
||||
def _get_unified_job_field_names(cls):
|
||||
return set(f.name for f in InventorySourceOptions._meta.fields) | set(
|
||||
['name', 'description', 'schedule', 'credentials', 'inventory']
|
||||
['name', 'description', 'credentials', 'inventory']
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
@@ -39,7 +39,7 @@ from awx.main.models.notifications import (
|
||||
NotificationTemplate,
|
||||
JobNotificationMixin,
|
||||
)
|
||||
from awx.main.utils import parse_yaml_or_json, getattr_dne
|
||||
from awx.main.utils import parse_yaml_or_json, getattr_dne, NullablePromptPseudoField
|
||||
from awx.main.fields import ImplicitRoleField, JSONField, AskForField
|
||||
from awx.main.models.mixins import (
|
||||
ResourceMixin,
|
||||
@@ -48,6 +48,8 @@ from awx.main.models.mixins import (
|
||||
TaskManagerJobMixin,
|
||||
CustomVirtualEnvMixin,
|
||||
RelatedJobsMixin,
|
||||
WebhookMixin,
|
||||
WebhookTemplateMixin,
|
||||
)
|
||||
|
||||
|
||||
@@ -187,7 +189,7 @@ class JobOptions(BaseModel):
|
||||
return needed
|
||||
|
||||
|
||||
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
|
||||
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
|
||||
'''
|
||||
A job template is a reusable job definition for applying a project (with
|
||||
playbook) to an inventory source with a given credential.
|
||||
@@ -271,7 +273,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
||||
@classmethod
|
||||
def _get_unified_job_field_names(cls):
|
||||
return set(f.name for f in JobOptions._meta.fields) | set(
|
||||
['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials',
|
||||
['name', 'description', 'survey_passwords', 'labels', 'credentials',
|
||||
'job_slice_number', 'job_slice_count']
|
||||
)
|
||||
|
||||
@@ -484,7 +486,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
||||
return UnifiedJob.objects.filter(unified_job_template=self)
|
||||
|
||||
|
||||
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin):
|
||||
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin, WebhookMixin):
|
||||
'''
|
||||
A job applies a project (with playbook) to an inventory source with a given
|
||||
credential. It represents a single invocation of ansible-playbook with the
|
||||
@@ -627,15 +629,17 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
|
||||
|
||||
@property
|
||||
def task_impact(self):
|
||||
# NOTE: We sorta have to assume the host count matches and that forks default to 5
|
||||
from awx.main.models.inventory import Host
|
||||
if self.launch_type == 'callback':
|
||||
count_hosts = 2
|
||||
else:
|
||||
count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
|
||||
if self.job_slice_count > 1:
|
||||
# Integer division intentional
|
||||
count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
|
||||
# If for some reason we can't count the hosts then lets assume the impact as forks
|
||||
if self.inventory is not None:
|
||||
count_hosts = self.inventory.hosts.count()
|
||||
if self.job_slice_count > 1:
|
||||
# Integer division intentional
|
||||
count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
|
||||
else:
|
||||
count_hosts = 5 if self.forks == 0 else self.forks
|
||||
return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
|
||||
|
||||
@property
|
||||
@@ -666,6 +670,14 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
|
||||
def processed_hosts(self):
|
||||
return self._get_hosts(job_host_summaries__processed__gt=0)
|
||||
|
||||
@property
|
||||
def ignored_hosts(self):
|
||||
return self._get_hosts(job_host_summaries__ignored__gt=0)
|
||||
|
||||
@property
|
||||
def rescued_hosts(self):
|
||||
return self._get_hosts(job_host_summaries__rescued__gt=0)
|
||||
|
||||
def notification_data(self, block=5):
|
||||
data = super(Job, self).notification_data()
|
||||
all_hosts = {}
|
||||
@@ -684,7 +696,9 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
|
||||
failures=h.failures,
|
||||
ok=h.ok,
|
||||
processed=h.processed,
|
||||
skipped=h.skipped) # TODO: update with rescued, ignored (see https://github.com/ansible/awx/issues/4394)
|
||||
skipped=h.skipped,
|
||||
rescued=h.rescued,
|
||||
ignored=h.ignored)
|
||||
data.update(dict(inventory=self.inventory.name if self.inventory else None,
|
||||
project=self.project.name if self.project else None,
|
||||
playbook=self.playbook,
|
||||
@@ -706,6 +720,14 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
|
||||
return "$hidden due to Ansible no_log flag$"
|
||||
return artifacts
|
||||
|
||||
@property
|
||||
def can_run_containerized(self):
|
||||
return any([ig for ig in self.preferred_instance_groups if ig.is_containerized])
|
||||
|
||||
@property
|
||||
def is_containerized(self):
|
||||
return bool(self.instance_group and self.instance_group.is_containerized)
|
||||
|
||||
@property
|
||||
def preferred_instance_groups(self):
|
||||
if self.project is not None and self.project.organization is not None:
|
||||
@@ -829,25 +851,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
|
||||
host.save()
|
||||
|
||||
|
||||
# Add on aliases for the non-related-model fields
|
||||
class NullablePromptPsuedoField(object):
|
||||
"""
|
||||
Interface for psuedo-property stored in `char_prompts` dict
|
||||
Used in LaunchTimeConfig and submodels
|
||||
"""
|
||||
def __init__(self, field_name):
|
||||
self.field_name = field_name
|
||||
|
||||
def __get__(self, instance, type=None):
|
||||
return instance.char_prompts.get(self.field_name, None)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
if value in (None, {}):
|
||||
instance.char_prompts.pop(self.field_name, None)
|
||||
else:
|
||||
instance.char_prompts[self.field_name] = value
|
||||
|
||||
|
||||
class LaunchTimeConfigBase(BaseModel):
|
||||
'''
|
||||
Needed as separate class from LaunchTimeConfig because some models
|
||||
@@ -868,6 +871,7 @@ class LaunchTimeConfigBase(BaseModel):
|
||||
null=True,
|
||||
default=None,
|
||||
on_delete=models.SET_NULL,
|
||||
help_text=_('Inventory applied as a prompt, assuming job template prompts for inventory')
|
||||
)
|
||||
# All standard fields are stored in this dictionary field
|
||||
# This is a solution to the nullable CharField problem, specific to prompting
|
||||
@@ -904,21 +908,14 @@ class LaunchTimeConfigBase(BaseModel):
|
||||
data[prompt_name] = prompt_val
|
||||
return data
|
||||
|
||||
def display_extra_vars(self):
|
||||
'''
|
||||
Hides fields marked as passwords in survey.
|
||||
'''
|
||||
if self.survey_passwords:
|
||||
extra_vars = parse_yaml_or_json(self.extra_vars).copy()
|
||||
for key, value in self.survey_passwords.items():
|
||||
if key in extra_vars:
|
||||
extra_vars[key] = value
|
||||
return extra_vars
|
||||
else:
|
||||
return self.extra_vars
|
||||
|
||||
def display_extra_data(self):
|
||||
return self.display_extra_vars()
|
||||
for field_name in JobTemplate.get_ask_mapping().keys():
|
||||
if field_name == 'extra_vars':
|
||||
continue
|
||||
try:
|
||||
LaunchTimeConfigBase._meta.get_field(field_name)
|
||||
except FieldDoesNotExist:
|
||||
setattr(LaunchTimeConfigBase, field_name, NullablePromptPseudoField(field_name))
|
||||
|
||||
|
||||
class LaunchTimeConfig(LaunchTimeConfigBase):
|
||||
@@ -953,14 +950,21 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
|
||||
def extra_vars(self, extra_vars):
|
||||
self.extra_data = extra_vars
|
||||
|
||||
def display_extra_vars(self):
|
||||
'''
|
||||
Hides fields marked as passwords in survey.
|
||||
'''
|
||||
if hasattr(self, 'survey_passwords') and self.survey_passwords:
|
||||
extra_vars = parse_yaml_or_json(self.extra_vars).copy()
|
||||
for key, value in self.survey_passwords.items():
|
||||
if key in extra_vars:
|
||||
extra_vars[key] = value
|
||||
return extra_vars
|
||||
else:
|
||||
return self.extra_vars
|
||||
|
||||
for field_name in JobTemplate.get_ask_mapping().keys():
|
||||
if field_name == 'extra_vars':
|
||||
continue
|
||||
try:
|
||||
LaunchTimeConfig._meta.get_field(field_name)
|
||||
except FieldDoesNotExist:
|
||||
setattr(LaunchTimeConfig, field_name, NullablePromptPsuedoField(field_name))
|
||||
def display_extra_data(self):
|
||||
return self.display_extra_vars()
|
||||
|
||||
|
||||
class JobLaunchConfig(LaunchTimeConfig):
|
||||
|
||||
@@ -1,31 +1,37 @@
|
||||
# Python
|
||||
import os
|
||||
import json
|
||||
from copy import copy, deepcopy
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
# Django
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib.auth.models import User # noqa
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# AWX
|
||||
from awx.main.models.base import prevent_search
|
||||
from awx.main.models.rbac import (
|
||||
Role, RoleAncestorEntry, get_roles_on_resource
|
||||
)
|
||||
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices
|
||||
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser
|
||||
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
|
||||
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
|
||||
from awx.main.fields import JSONField, AskForField
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.models.mixins')
|
||||
|
||||
|
||||
__all__ = ['ResourceMixin', 'SurveyJobTemplateMixin', 'SurveyJobMixin',
|
||||
'TaskManagerUnifiedJobMixin', 'TaskManagerJobMixin', 'TaskManagerProjectUpdateMixin',
|
||||
'TaskManagerInventoryUpdateMixin', 'CustomVirtualEnvMixin']
|
||||
@@ -247,7 +253,7 @@ class SurveyJobTemplateMixin(models.Model):
|
||||
else:
|
||||
choice_list = copy(survey_element['choices'])
|
||||
if isinstance(choice_list, str):
|
||||
choice_list = choice_list.split('\n')
|
||||
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
|
||||
for val in data[survey_element['variable']]:
|
||||
if val not in choice_list:
|
||||
errors.append("Value %s for '%s' expected to be one of %s." % (val, survey_element['variable'],
|
||||
@@ -255,7 +261,7 @@ class SurveyJobTemplateMixin(models.Model):
|
||||
elif survey_element['type'] == 'multiplechoice':
|
||||
choice_list = copy(survey_element['choices'])
|
||||
if isinstance(choice_list, str):
|
||||
choice_list = choice_list.split('\n')
|
||||
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
|
||||
if survey_element['variable'] in data:
|
||||
if data[survey_element['variable']] not in choice_list:
|
||||
errors.append("Value %s for '%s' expected to be one of %s." % (data[survey_element['variable']],
|
||||
@@ -483,3 +489,139 @@ class RelatedJobsMixin(object):
|
||||
raise RuntimeError("Programmer error. Expected _get_active_jobs() to return a QuerySet.")
|
||||
|
||||
return [dict(id=t[0], type=mapping[t[1]]) for t in jobs.values_list('id', 'polymorphic_ctype_id')]
|
||||
|
||||
|
||||
class WebhookTemplateMixin(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
SERVICES = [
|
||||
('github', "GitHub"),
|
||||
('gitlab', "GitLab"),
|
||||
]
|
||||
|
||||
webhook_service = models.CharField(
|
||||
max_length=16,
|
||||
choices=SERVICES,
|
||||
blank=True,
|
||||
help_text=_('Service that webhook requests will be accepted from')
|
||||
)
|
||||
webhook_key = prevent_search(models.CharField(
|
||||
max_length=64,
|
||||
blank=True,
|
||||
help_text=_('Shared secret that the webhook service will use to sign requests')
|
||||
))
|
||||
webhook_credential = models.ForeignKey(
|
||||
'Credential',
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='%(class)ss',
|
||||
help_text=_('Personal Access Token for posting back the status to the service API')
|
||||
)
|
||||
|
||||
def rotate_webhook_key(self):
|
||||
self.webhook_key = get_random_string(length=50)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
update_fields = kwargs.get('update_fields')
|
||||
|
||||
if not self.pk or self._values_have_edits({'webhook_service': self.webhook_service}):
|
||||
if self.webhook_service:
|
||||
self.rotate_webhook_key()
|
||||
else:
|
||||
self.webhook_key = ''
|
||||
|
||||
if update_fields and 'webhook_service' in update_fields:
|
||||
update_fields.add('webhook_key')
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
class WebhookMixin(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
SERVICES = WebhookTemplateMixin.SERVICES
|
||||
|
||||
webhook_service = models.CharField(
|
||||
max_length=16,
|
||||
choices=SERVICES,
|
||||
blank=True,
|
||||
help_text=_('Service that webhook requests will be accepted from')
|
||||
)
|
||||
webhook_credential = models.ForeignKey(
|
||||
'Credential',
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='%(class)ss',
|
||||
help_text=_('Personal Access Token for posting back the status to the service API')
|
||||
)
|
||||
webhook_guid = models.CharField(
|
||||
blank=True,
|
||||
max_length=128,
|
||||
help_text=_('Unique identifier of the event that triggered this webhook')
|
||||
)
|
||||
|
||||
def update_webhook_status(self, status):
|
||||
if not self.webhook_credential:
|
||||
logger.debug("No credential configured to post back webhook status, skipping.")
|
||||
return
|
||||
|
||||
status_api = self.extra_vars_dict.get('tower_webhook_status_api')
|
||||
if not status_api:
|
||||
logger.debug("Webhook event did not have a status API endpoint associated, skipping.")
|
||||
return
|
||||
|
||||
service_header = {
|
||||
'github': ('Authorization', 'token {}'),
|
||||
'gitlab': ('PRIVATE-TOKEN', '{}'),
|
||||
}
|
||||
service_statuses = {
|
||||
'github': {
|
||||
'pending': 'pending',
|
||||
'successful': 'success',
|
||||
'failed': 'failure',
|
||||
'canceled': 'failure', # GitHub doesn't have a 'canceled' status :(
|
||||
'error': 'error',
|
||||
},
|
||||
'gitlab': {
|
||||
'pending': 'pending',
|
||||
'running': 'running',
|
||||
'successful': 'success',
|
||||
'failed': 'failed',
|
||||
'error': 'failed', # GitLab doesn't have an 'error' status distinct from 'failed' :(
|
||||
'canceled': 'canceled',
|
||||
},
|
||||
}
|
||||
|
||||
statuses = service_statuses[self.webhook_service]
|
||||
if status not in statuses:
|
||||
logger.debug("Skipping webhook job status change: '{}'".format(status))
|
||||
return
|
||||
try:
|
||||
license_type = get_licenser().validate().get('license_type')
|
||||
data = {
|
||||
'state': statuses[status],
|
||||
'context': 'ansible/awx' if license_type == 'open' else 'ansible/tower',
|
||||
'target_url': self.get_ui_url(),
|
||||
}
|
||||
k, v = service_header[self.webhook_service]
|
||||
headers = {
|
||||
k: v.format(self.webhook_credential.get_input('token')),
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
response = requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)
|
||||
except Exception:
|
||||
logger.exception("Posting webhook status caused an error.")
|
||||
return
|
||||
|
||||
if response.status_code < 400:
|
||||
logger.debug("Webhook status update sent.")
|
||||
else:
|
||||
logger.error(
|
||||
"Posting webhook status failed, code: {}\n"
|
||||
"{}\n"
|
||||
"Payload sent: {}".format(response.status_code, response.text, json.dumps(data))
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@ from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
|
||||
|
||||
# AWX
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel
|
||||
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
|
||||
from awx.main.utils import encrypt_field, decrypt_field, set_environ
|
||||
from awx.main.notifications.email_backend import CustomEmailBackend
|
||||
from awx.main.notifications.slack_backend import SlackBackend
|
||||
@@ -70,7 +70,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
|
||||
choices=NOTIFICATION_TYPE_CHOICES,
|
||||
)
|
||||
|
||||
notification_configuration = JSONField(blank=False)
|
||||
notification_configuration = prevent_search(JSONField(blank=False))
|
||||
|
||||
def default_messages():
|
||||
return {'started': None, 'success': None, 'error': None}
|
||||
|
||||
@@ -3,7 +3,7 @@ import re
|
||||
|
||||
# Django
|
||||
from django.core.validators import RegexValidator
|
||||
from django.db import models
|
||||
from django.db import models, connection
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
@@ -121,7 +121,7 @@ class OAuth2AccessToken(AbstractAccessToken):
|
||||
valid = super(OAuth2AccessToken, self).is_valid(scopes)
|
||||
if valid:
|
||||
self.last_used = now()
|
||||
self.save(update_fields=['last_used'])
|
||||
connection.on_commit(lambda: self.save(update_fields=['last_used']))
|
||||
return valid
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
@@ -51,6 +51,11 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
|
||||
default=0,
|
||||
help_text=_('Maximum number of hosts allowed to be managed by this organization.'),
|
||||
)
|
||||
notification_templates_approvals = models.ManyToManyField(
|
||||
"NotificationTemplate",
|
||||
blank=True,
|
||||
related_name='%(class)s_notification_templates_for_approvals'
|
||||
)
|
||||
|
||||
admin_role = ImplicitRoleField(
|
||||
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||
|
||||
@@ -329,7 +329,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
|
||||
@classmethod
|
||||
def _get_unified_job_field_names(cls):
|
||||
return set(f.name for f in ProjectOptions._meta.fields) | set(
|
||||
['name', 'description', 'schedule']
|
||||
['name', 'description']
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
@@ -119,10 +119,11 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
|
||||
tzinfo = r._dtstart.tzinfo
|
||||
if tzinfo is utc:
|
||||
return 'UTC'
|
||||
fname = tzinfo._filename
|
||||
for zone in all_zones:
|
||||
if fname.endswith(zone):
|
||||
return zone
|
||||
fname = getattr(tzinfo, '_filename', None)
|
||||
if fname:
|
||||
for zone in all_zones:
|
||||
if fname.endswith(zone):
|
||||
return zone
|
||||
logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule))
|
||||
return ''
|
||||
|
||||
|
||||
@@ -42,9 +42,9 @@ from awx.main.utils import (
|
||||
camelcase_to_underscore, get_model_for_type,
|
||||
encrypt_dict, decrypt_field, _inventory_updates,
|
||||
copy_model_by_class, copy_m2m_relationships,
|
||||
get_type_for_model, parse_yaml_or_json, getattr_dne
|
||||
get_type_for_model, parse_yaml_or_json, getattr_dne,
|
||||
polymorphic, schedule_task_manager
|
||||
)
|
||||
from awx.main.utils import polymorphic, schedule_task_manager
|
||||
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL
|
||||
from awx.main.redact import UriCleaner, REPLACE_STR
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
@@ -532,6 +532,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
('scheduled', _('Scheduled')), # Job was started from a schedule.
|
||||
('dependency', _('Dependency')), # Job was started as a dependency of another job.
|
||||
('workflow', _('Workflow')), # Job was started from a workflow job.
|
||||
('webhook', _('Webhook')), # Job was started from a webhook event.
|
||||
('sync', _('Sync')), # Job was started from a project sync.
|
||||
('scm', _('SCM Update')) # Job was created as an Inventory SCM sync.
|
||||
]
|
||||
@@ -559,11 +560,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
related_name='%(class)s_unified_jobs',
|
||||
on_delete=polymorphic.SET_NULL,
|
||||
)
|
||||
created = models.DateTimeField(
|
||||
default=None,
|
||||
editable=False,
|
||||
db_index=True, # add an index, this is a commonly queried field
|
||||
)
|
||||
launch_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=LAUNCH_TYPE_CHOICES,
|
||||
default='manual',
|
||||
editable=False,
|
||||
db_index=True
|
||||
)
|
||||
schedule = models.ForeignKey( # Which schedule entry was responsible for starting this job.
|
||||
'Schedule',
|
||||
@@ -621,6 +628,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
default=None,
|
||||
editable=False,
|
||||
help_text=_("The date and time the job finished execution."),
|
||||
db_index=True,
|
||||
)
|
||||
elapsed = models.DecimalField(
|
||||
max_digits=12,
|
||||
@@ -706,6 +714,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
def supports_isolation(cls):
|
||||
return False
|
||||
|
||||
@property
|
||||
def can_run_containerized(self):
|
||||
return False
|
||||
|
||||
def _get_parent_field_name(self):
|
||||
return 'unified_job_template' # Override in subclasses.
|
||||
|
||||
@@ -1199,6 +1211,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
|
||||
def websocket_emit_status(self, status):
|
||||
connection.on_commit(lambda: self._websocket_emit_status(status))
|
||||
if hasattr(self, 'update_webhook_status'):
|
||||
connection.on_commit(lambda: self.update_webhook_status(status))
|
||||
|
||||
def notification_data(self):
|
||||
return dict(id=self.id,
|
||||
@@ -1379,9 +1393,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
|
||||
wj = self.get_workflow_job()
|
||||
if wj:
|
||||
schedule = getattr_dne(wj, 'schedule')
|
||||
for name in ('awx', 'tower'):
|
||||
r['{}_workflow_job_id'.format(name)] = wj.pk
|
||||
r['{}_workflow_job_name'.format(name)] = wj.name
|
||||
if schedule:
|
||||
r['{}_parent_job_schedule_id'.format(name)] = schedule.pk
|
||||
r['{}_parent_job_schedule_name'.format(name)] = schedule.name
|
||||
|
||||
if not created_by:
|
||||
schedule = getattr_dne(self, 'schedule')
|
||||
@@ -1411,3 +1429,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
|
||||
def is_isolated(self):
|
||||
return bool(self.controller_node)
|
||||
|
||||
@property
|
||||
def is_containerized(self):
|
||||
return False
|
||||
|
||||
@@ -3,14 +3,19 @@
|
||||
|
||||
# Python
|
||||
import logging
|
||||
from copy import copy
|
||||
from urllib.parse import urljoin
|
||||
|
||||
# Django
|
||||
from django.db import models
|
||||
from django.db import connection, models
|
||||
from django.conf import settings
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
#from django import settings as tower_settings
|
||||
|
||||
# Django-CRUM
|
||||
from crum import get_current_user
|
||||
|
||||
# AWX
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
|
||||
@@ -19,7 +24,7 @@ from awx.main.models.notifications import (
|
||||
NotificationTemplate,
|
||||
JobNotificationMixin
|
||||
)
|
||||
from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty
|
||||
from awx.main.models.base import CreatedModifiedModel, VarsDictProperty
|
||||
from awx.main.models.rbac import (
|
||||
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||
ROLE_SINGLETON_SYSTEM_AUDITOR
|
||||
@@ -30,6 +35,8 @@ from awx.main.models.mixins import (
|
||||
SurveyJobTemplateMixin,
|
||||
SurveyJobMixin,
|
||||
RelatedJobsMixin,
|
||||
WebhookMixin,
|
||||
WebhookTemplateMixin,
|
||||
)
|
||||
from awx.main.models.jobs import LaunchTimeConfigBase, LaunchTimeConfig, JobTemplate
|
||||
from awx.main.models.credential import Credential
|
||||
@@ -38,9 +45,6 @@ from awx.main.fields import JSONField
|
||||
from awx.main.utils import schedule_task_manager
|
||||
|
||||
|
||||
from copy import copy
|
||||
from urllib.parse import urljoin
|
||||
|
||||
__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode',
|
||||
'WorkflowJobTemplateNode', 'WorkflowApprovalTemplate', 'WorkflowApproval']
|
||||
|
||||
@@ -196,7 +200,7 @@ class WorkflowJobNode(WorkflowNodeBase):
|
||||
)
|
||||
do_not_run = models.BooleanField(
|
||||
default=False,
|
||||
help_text=_("Indidcates that a job will not be created when True. Workflow runtime "
|
||||
help_text=_("Indicates that a job will not be created when True. Workflow runtime "
|
||||
"semantics will mark this True if the node is in a path that will "
|
||||
"decidedly not be ran. A value of False means the node may not run."),
|
||||
)
|
||||
@@ -207,11 +211,14 @@ class WorkflowJobNode(WorkflowNodeBase):
|
||||
def prompts_dict(self, *args, **kwargs):
|
||||
r = super(WorkflowJobNode, self).prompts_dict(*args, **kwargs)
|
||||
# Explanation - WFJT extra_vars still break pattern, so they are not
|
||||
# put through prompts processing, but inventory is only accepted
|
||||
# put through prompts processing, but inventory and others are only accepted
|
||||
# if JT prompts for it, so it goes through this mechanism
|
||||
if self.workflow_job and self.workflow_job.inventory_id:
|
||||
# workflow job inventory takes precedence
|
||||
r['inventory'] = self.workflow_job.inventory
|
||||
if self.workflow_job:
|
||||
if self.workflow_job.inventory_id:
|
||||
# workflow job inventory takes precedence
|
||||
r['inventory'] = self.workflow_job.inventory
|
||||
if self.workflow_job.char_prompts:
|
||||
r.update(self.workflow_job.char_prompts)
|
||||
return r
|
||||
|
||||
def get_job_kwargs(self):
|
||||
@@ -298,7 +305,7 @@ class WorkflowJobNode(WorkflowNodeBase):
|
||||
return data
|
||||
|
||||
|
||||
class WorkflowJobOptions(BaseModel):
|
||||
class WorkflowJobOptions(LaunchTimeConfigBase):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
@@ -318,10 +325,11 @@ class WorkflowJobOptions(BaseModel):
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_field_names(cls):
|
||||
return set(f.name for f in WorkflowJobOptions._meta.fields) | set(
|
||||
# NOTE: if other prompts are added to WFJT, put fields in WJOptions, remove inventory
|
||||
['name', 'description', 'schedule', 'survey_passwords', 'labels', 'inventory']
|
||||
r = set(f.name for f in WorkflowJobOptions._meta.fields) | set(
|
||||
['name', 'description', 'survey_passwords', 'labels', 'limit', 'scm_branch']
|
||||
)
|
||||
r.remove('char_prompts') # needed due to copying launch config to launch config
|
||||
return r
|
||||
|
||||
def _create_workflow_nodes(self, old_node_list, user=None):
|
||||
node_links = {}
|
||||
@@ -355,7 +363,7 @@ class WorkflowJobOptions(BaseModel):
|
||||
return new_workflow_job
|
||||
|
||||
|
||||
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin):
|
||||
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin, WebhookTemplateMixin):
|
||||
|
||||
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
|
||||
FIELDS_TO_PRESERVE_AT_COPY = [
|
||||
@@ -372,19 +380,24 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='workflows',
|
||||
)
|
||||
inventory = models.ForeignKey(
|
||||
'Inventory',
|
||||
related_name='%(class)ss',
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
on_delete=models.SET_NULL,
|
||||
help_text=_('Inventory applied to all job templates in workflow that prompt for inventory.'),
|
||||
)
|
||||
ask_inventory_on_launch = AskForField(
|
||||
blank=True,
|
||||
default=False,
|
||||
)
|
||||
ask_limit_on_launch = AskForField(
|
||||
blank=True,
|
||||
default=False,
|
||||
)
|
||||
ask_scm_branch_on_launch = AskForField(
|
||||
blank=True,
|
||||
default=False,
|
||||
)
|
||||
notification_templates_approvals = models.ManyToManyField(
|
||||
"NotificationTemplate",
|
||||
blank=True,
|
||||
related_name='%(class)s_notification_templates_for_approvals'
|
||||
)
|
||||
|
||||
admin_role = ImplicitRoleField(parent_role=[
|
||||
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||
'organization.workflow_admin_role'
|
||||
@@ -438,9 +451,22 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
.filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
|
||||
success_notification_templates = list(base_notification_templates
|
||||
.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
|
||||
approval_notification_templates = list(base_notification_templates
|
||||
.filter(workflowjobtemplate_notification_templates_for_approvals__in=[self]))
|
||||
# Get Organization NotificationTemplates
|
||||
if self.organization is not None:
|
||||
error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
|
||||
organization_notification_templates_for_errors=self.organization)))
|
||||
started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(
|
||||
organization_notification_templates_for_started=self.organization)))
|
||||
success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
|
||||
organization_notification_templates_for_success=self.organization)))
|
||||
approval_notification_templates = set(approval_notification_templates + list(base_notification_templates.filter(
|
||||
organization_notification_templates_for_approvals=self.organization)))
|
||||
return dict(error=list(error_notification_templates),
|
||||
started=list(started_notification_templates),
|
||||
success=list(success_notification_templates))
|
||||
success=list(success_notification_templates),
|
||||
approvals=list(approval_notification_templates))
|
||||
|
||||
def create_unified_job(self, **kwargs):
|
||||
workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
|
||||
@@ -515,7 +541,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
return WorkflowJob.objects.filter(workflow_job_template=self)
|
||||
|
||||
|
||||
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, LaunchTimeConfigBase):
|
||||
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, WebhookMixin):
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
ordering = ('id',)
|
||||
@@ -646,7 +672,7 @@ class WorkflowApprovalTemplate(UnifiedJobTemplate):
|
||||
return self.workflowjobtemplatenodes.first().workflow_job_template
|
||||
|
||||
|
||||
class WorkflowApproval(UnifiedJob):
|
||||
class WorkflowApproval(UnifiedJob, JobNotificationMixin):
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
|
||||
@@ -667,6 +693,14 @@ class WorkflowApproval(UnifiedJob):
|
||||
default=False,
|
||||
help_text=_("Shows when an approval node (with a timeout assigned to it) has timed out.")
|
||||
)
|
||||
approved_or_denied_by = models.ForeignKey(
|
||||
'auth.User',
|
||||
related_name='%s(class)s_approved+',
|
||||
default=None,
|
||||
null=True,
|
||||
editable=False,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
|
||||
@classmethod
|
||||
@@ -680,26 +714,78 @@ class WorkflowApproval(UnifiedJob):
|
||||
def event_class(self):
|
||||
return None
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
||||
|
||||
def _get_parent_field_name(self):
|
||||
return 'workflow_approval_template'
|
||||
|
||||
def approve(self, request=None):
|
||||
self.status = 'successful'
|
||||
self.approved_or_denied_by = get_current_user()
|
||||
self.save()
|
||||
self.send_approval_notification('approved')
|
||||
self.websocket_emit_status(self.status)
|
||||
schedule_task_manager()
|
||||
return reverse('api:workflow_approval_approve', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
def deny(self, request=None):
|
||||
self.status = 'failed'
|
||||
self.approved_or_denied_by = get_current_user()
|
||||
self.save()
|
||||
self.send_approval_notification('denied')
|
||||
self.websocket_emit_status(self.status)
|
||||
schedule_task_manager()
|
||||
return reverse('api:workflow_approval_deny', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
def signal_start(self, **kwargs):
|
||||
can_start = super(WorkflowApproval, self).signal_start(**kwargs)
|
||||
self.send_approval_notification('running')
|
||||
return can_start
|
||||
|
||||
def send_approval_notification(self, approval_status):
|
||||
from awx.main.tasks import send_notifications # avoid circular import
|
||||
if self.workflow_job_template is None:
|
||||
return
|
||||
for nt in self.workflow_job_template.notification_templates["approvals"]:
|
||||
try:
|
||||
(notification_subject, notification_body) = self.build_approval_notification_message(nt, approval_status)
|
||||
except Exception:
|
||||
raise NotImplementedError("build_approval_notification_message() does not exist")
|
||||
|
||||
# Use kwargs to force late-binding
|
||||
# https://stackoverflow.com/a/3431699/10669572
|
||||
def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
|
||||
def _func():
|
||||
send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
|
||||
job_id=self.id)
|
||||
return _func
|
||||
connection.on_commit(send_it())
|
||||
|
||||
def build_approval_notification_message(self, nt, approval_status):
|
||||
subject = []
|
||||
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
||||
subject.append(('The approval node "{}"').format(self.workflow_approval_template.name))
|
||||
if approval_status == 'running':
|
||||
subject.append(('needs review. This node can be viewed at: {}').format(workflow_url))
|
||||
if approval_status == 'approved':
|
||||
subject.append(('was approved. {}').format(workflow_url))
|
||||
if approval_status == 'timed_out':
|
||||
subject.append(('has timed out. {}').format(workflow_url))
|
||||
elif approval_status == 'denied':
|
||||
subject.append(('was denied. {}').format(workflow_url))
|
||||
subject = " ".join(subject)
|
||||
body = self.notification_data()
|
||||
body['body'] = subject
|
||||
|
||||
return subject, body
|
||||
|
||||
@property
|
||||
def workflow_job_template(self):
|
||||
return self.unified_job_node.workflow_job.unified_job_template
|
||||
try:
|
||||
return self.unified_job_node.workflow_job.unified_job_template
|
||||
except ObjectDoesNotExist:
|
||||
return None
|
||||
|
||||
@property
|
||||
def workflow_job(self):
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import re
|
||||
import urllib.parse as urlparse
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
REPLACE_STR = '$encrypted$'
|
||||
|
||||
|
||||
@@ -10,14 +12,22 @@ class UriCleaner(object):
|
||||
|
||||
@staticmethod
|
||||
def remove_sensitive(cleartext):
|
||||
if settings.PRIMARY_GALAXY_URL:
|
||||
exclude_list = [settings.PRIMARY_GALAXY_URL] + [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
|
||||
else:
|
||||
exclude_list = [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
|
||||
redactedtext = cleartext
|
||||
text_index = 0
|
||||
while True:
|
||||
match = UriCleaner.SENSITIVE_URI_PATTERN.search(redactedtext, text_index)
|
||||
if not match:
|
||||
break
|
||||
uri_str = match.group(1)
|
||||
# Do not redact items from the exclude list
|
||||
if any(uri_str.startswith(exclude_uri) for exclude_uri in exclude_list):
|
||||
text_index = match.start() + len(uri_str)
|
||||
continue
|
||||
try:
|
||||
uri_str = match.group(1)
|
||||
# May raise a ValueError if invalid URI for one reason or another
|
||||
o = urlparse.urlsplit(uri_str)
|
||||
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
#
|
||||
|
||||
from awx.main.scheduler.task_manager import TaskManager # noqa
|
||||
from .task_manager import TaskManager
|
||||
|
||||
__all__ = ['TaskManager']
|
||||
|
||||
183
awx/main/scheduler/kubernetes.py
Normal file
183
awx/main/scheduler/kubernetes.py
Normal file
@@ -0,0 +1,183 @@
|
||||
import collections
|
||||
import os
|
||||
import stat
|
||||
import time
|
||||
import yaml
|
||||
import tempfile
|
||||
import logging
|
||||
from base64 import b64encode
|
||||
|
||||
from django.conf import settings
|
||||
from kubernetes import client, config
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from awx.main.utils.common import parse_yaml_or_json
|
||||
|
||||
logger = logging.getLogger('awx.main.scheduler')
|
||||
|
||||
|
||||
class PodManager(object):
|
||||
|
||||
def __init__(self, task=None):
|
||||
self.task = task
|
||||
|
||||
def deploy(self):
|
||||
if not self.credential.kubernetes:
|
||||
raise RuntimeError('Pod deployment cannot occur without a Kubernetes credential')
|
||||
|
||||
self.kube_api.create_namespaced_pod(body=self.pod_definition,
|
||||
namespace=self.namespace,
|
||||
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
|
||||
|
||||
num_retries = settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES
|
||||
for retry_attempt in range(num_retries - 1):
|
||||
logger.debug(f"Checking for pod {self.pod_name}. Attempt {retry_attempt + 1} of {num_retries}")
|
||||
pod = self.kube_api.read_namespaced_pod(name=self.pod_name,
|
||||
namespace=self.namespace,
|
||||
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
|
||||
if pod.status.phase != 'Pending':
|
||||
break
|
||||
else:
|
||||
logger.debug(f"Pod {self.pod_name} is Pending.")
|
||||
time.sleep(settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY)
|
||||
continue
|
||||
|
||||
if pod.status.phase == 'Running':
|
||||
logger.debug(f"Pod {self.pod_name} is online.")
|
||||
return pod
|
||||
else:
|
||||
logger.warn(f"Pod {self.pod_name} did not start. Status is {pod.status.phase}.")
|
||||
|
||||
@classmethod
|
||||
def list_active_jobs(self, instance_group):
|
||||
task = collections.namedtuple('Task', 'id instance_group')(
|
||||
id='',
|
||||
instance_group=instance_group
|
||||
)
|
||||
pm = PodManager(task)
|
||||
try:
|
||||
for pod in pm.kube_api.list_namespaced_pod(
|
||||
pm.namespace,
|
||||
label_selector='ansible-awx={}'.format(settings.INSTALL_UUID)
|
||||
).to_dict().get('items', []):
|
||||
job = pod['metadata'].get('labels', {}).get('ansible-awx-job-id')
|
||||
if job:
|
||||
try:
|
||||
yield int(job)
|
||||
except ValueError:
|
||||
pass
|
||||
except Exception:
|
||||
logger.exception('Failed to list pods for container group {}'.format(instance_group))
|
||||
|
||||
def delete(self):
|
||||
return self.kube_api.delete_namespaced_pod(name=self.pod_name,
|
||||
namespace=self.namespace,
|
||||
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
|
||||
|
||||
@property
|
||||
def namespace(self):
|
||||
return self.pod_definition['metadata']['namespace']
|
||||
|
||||
@property
|
||||
def credential(self):
|
||||
return self.task.instance_group.credential
|
||||
|
||||
@cached_property
|
||||
def kube_config(self):
|
||||
return generate_tmp_kube_config(self.credential, self.namespace)
|
||||
|
||||
@cached_property
|
||||
def kube_api(self):
|
||||
my_client = config.new_client_from_config(config_file=self.kube_config)
|
||||
return client.CoreV1Api(api_client=my_client)
|
||||
|
||||
@property
|
||||
def pod_name(self):
|
||||
return f"awx-job-{self.task.id}"
|
||||
|
||||
@property
|
||||
def pod_definition(self):
|
||||
default_pod_spec = {
|
||||
"apiVersion": "v1",
|
||||
"kind": "Pod",
|
||||
"metadata": {
|
||||
"namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE
|
||||
},
|
||||
"spec": {
|
||||
"containers": [{
|
||||
"image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE,
|
||||
"tty": True,
|
||||
"stdin": True,
|
||||
"imagePullPolicy": "Always",
|
||||
"args": [
|
||||
'sleep', 'infinity'
|
||||
]
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
pod_spec_override = {}
|
||||
if self.task and self.task.instance_group.pod_spec_override:
|
||||
pod_spec_override = parse_yaml_or_json(
|
||||
self.task.instance_group.pod_spec_override)
|
||||
pod_spec = {**default_pod_spec, **pod_spec_override}
|
||||
|
||||
if self.task:
|
||||
pod_spec['metadata']['name'] = self.pod_name
|
||||
pod_spec['metadata']['labels'] = {
|
||||
'ansible-awx': settings.INSTALL_UUID,
|
||||
'ansible-awx-job-id': str(self.task.id)
|
||||
}
|
||||
pod_spec['spec']['containers'][0]['name'] = self.pod_name
|
||||
|
||||
return pod_spec
|
||||
|
||||
|
||||
def generate_tmp_kube_config(credential, namespace):
|
||||
host_input = credential.get_input('host')
|
||||
config = {
|
||||
"apiVersion": "v1",
|
||||
"kind": "Config",
|
||||
"preferences": {},
|
||||
"clusters": [
|
||||
{
|
||||
"name": host_input,
|
||||
"cluster": {
|
||||
"server": host_input
|
||||
}
|
||||
}
|
||||
],
|
||||
"users": [
|
||||
{
|
||||
"name": host_input,
|
||||
"user": {
|
||||
"token": credential.get_input('bearer_token')
|
||||
}
|
||||
}
|
||||
],
|
||||
"contexts": [
|
||||
{
|
||||
"name": host_input,
|
||||
"context": {
|
||||
"cluster": host_input,
|
||||
"user": host_input,
|
||||
"namespace": namespace
|
||||
}
|
||||
}
|
||||
],
|
||||
"current-context": host_input
|
||||
}
|
||||
|
||||
if credential.get_input('verify_ssl'):
|
||||
config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(
|
||||
credential.get_input('ssl_ca_cert').encode() # encode to bytes
|
||||
).decode() # decode the base64 data into a str
|
||||
else:
|
||||
config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
|
||||
|
||||
fd, path = tempfile.mkstemp(prefix='kubeconfig')
|
||||
with open(path, 'wb') as temp:
|
||||
temp.write(yaml.dump(config).encode())
|
||||
temp.flush()
|
||||
os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
return path
|
||||
@@ -251,6 +251,20 @@ class TaskManager():
|
||||
task.controller_node = controller_node
|
||||
logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
|
||||
task.log_format, task.execution_node, controller_node))
|
||||
elif rampart_group.is_containerized:
|
||||
task.instance_group = rampart_group
|
||||
if not task.supports_isolation():
|
||||
# project updates and inventory updates don't *actually* run in pods,
|
||||
# so just pick *any* non-isolated, non-containerized host and use it
|
||||
for group in InstanceGroup.objects.all():
|
||||
if group.is_containerized or group.controller_id:
|
||||
continue
|
||||
match = group.find_largest_idle_instance()
|
||||
if match:
|
||||
task.execution_node = match.hostname
|
||||
logger.debug('Submitting containerized {} to queue {}.'.format(
|
||||
task.log_format, task.execution_node))
|
||||
break
|
||||
else:
|
||||
task.instance_group = rampart_group
|
||||
if instance is not None:
|
||||
@@ -447,7 +461,7 @@ class TaskManager():
|
||||
for rampart_group in preferred_instance_groups:
|
||||
if idle_instance_that_fits is None:
|
||||
idle_instance_that_fits = rampart_group.find_largest_idle_instance()
|
||||
if self.get_remaining_capacity(rampart_group.name) <= 0:
|
||||
if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
|
||||
logger.debug("Skipping group {} capacity <= 0".format(rampart_group.name))
|
||||
continue
|
||||
|
||||
@@ -456,10 +470,11 @@ class TaskManager():
|
||||
logger.debug("Starting dependent {} in group {} instance {}".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname))
|
||||
elif not execution_instance and idle_instance_that_fits:
|
||||
execution_instance = idle_instance_that_fits
|
||||
logger.debug("Starting dependent {} in group {} on idle instance {}".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname))
|
||||
if execution_instance:
|
||||
if not rampart_group.is_containerized:
|
||||
execution_instance = idle_instance_that_fits
|
||||
logger.debug("Starting dependent {} in group {} on idle instance {}".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname))
|
||||
if execution_instance or rampart_group.is_containerized:
|
||||
self.graph[rampart_group.name]['graph'].add_job(task)
|
||||
tasks_to_fail = [t for t in dependency_tasks if t != task]
|
||||
tasks_to_fail += [dependent_task]
|
||||
@@ -492,10 +507,16 @@ class TaskManager():
|
||||
self.start_task(task, None, task.get_jobs_fail_chain(), None)
|
||||
continue
|
||||
for rampart_group in preferred_instance_groups:
|
||||
if task.can_run_containerized and rampart_group.is_containerized:
|
||||
self.graph[rampart_group.name]['graph'].add_job(task)
|
||||
self.start_task(task, rampart_group, task.get_jobs_fail_chain(), None)
|
||||
found_acceptable_queue = True
|
||||
break
|
||||
|
||||
if idle_instance_that_fits is None:
|
||||
idle_instance_that_fits = rampart_group.find_largest_idle_instance()
|
||||
remaining_capacity = self.get_remaining_capacity(rampart_group.name)
|
||||
if remaining_capacity <= 0:
|
||||
if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
|
||||
logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
|
||||
rampart_group.name, remaining_capacity))
|
||||
continue
|
||||
@@ -505,10 +526,11 @@ class TaskManager():
|
||||
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
|
||||
elif not execution_instance and idle_instance_that_fits:
|
||||
execution_instance = idle_instance_that_fits
|
||||
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
|
||||
if execution_instance:
|
||||
if not rampart_group.is_containerized:
|
||||
execution_instance = idle_instance_that_fits
|
||||
logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
|
||||
task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
|
||||
if execution_instance or rampart_group.is_containerized:
|
||||
self.graph[rampart_group.name]['graph'].add_job(task)
|
||||
self.start_task(task, rampart_group, task.get_jobs_fail_chain(), execution_instance)
|
||||
found_acceptable_queue = True
|
||||
@@ -533,6 +555,7 @@ class TaskManager():
|
||||
logger.warn(timeout_message)
|
||||
task.timed_out = True
|
||||
task.status = 'failed'
|
||||
task.send_approval_notification('timed_out')
|
||||
task.websocket_emit_status(task.status)
|
||||
task.job_explanation = timeout_message
|
||||
task.save(update_fields=['status', 'job_explanation', 'timed_out'])
|
||||
|
||||
@@ -684,16 +684,18 @@ def save_user_session_membership(sender, **kwargs):
|
||||
return
|
||||
if UserSessionMembership.objects.filter(user=user_id, session=session).exists():
|
||||
return
|
||||
UserSessionMembership(user_id=user_id, session=session, created=timezone.now()).save()
|
||||
expired = UserSessionMembership.get_memberships_over_limit(user_id)
|
||||
for membership in expired:
|
||||
Session.objects.filter(session_key__in=[membership.session_id]).delete()
|
||||
membership.delete()
|
||||
if len(expired):
|
||||
consumers.emit_channel_notification(
|
||||
'control-limit_reached_{}'.format(user_id),
|
||||
dict(group_name='control', reason='limit_reached')
|
||||
)
|
||||
# check if user_id from session has an id match in User before saving
|
||||
if User.objects.filter(id=int(user_id)).exists():
|
||||
UserSessionMembership(user_id=user_id, session=session, created=timezone.now()).save()
|
||||
expired = UserSessionMembership.get_memberships_over_limit(user_id)
|
||||
for membership in expired:
|
||||
Session.objects.filter(session_key__in=[membership.session_id]).delete()
|
||||
membership.delete()
|
||||
if len(expired):
|
||||
consumers.emit_channel_notification(
|
||||
'control-limit_reached_{}'.format(user_id),
|
||||
dict(group_name='control', reason='limit_reached')
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_save, sender=OAuth2AccessToken)
|
||||
|
||||
@@ -40,6 +40,9 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
|
||||
# Kubernetes
|
||||
from kubernetes.client.rest import ApiException
|
||||
|
||||
# Django-CRUM
|
||||
from crum import impersonate
|
||||
|
||||
@@ -52,7 +55,7 @@ import ansible_runner
|
||||
|
||||
# AWX
|
||||
from awx import __version__ as awx_application_version
|
||||
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV
|
||||
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV, GALAXY_SERVER_FIELDS
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.models import (
|
||||
Schedule, TowerScheduleState, Instance, InstanceGroup,
|
||||
@@ -73,6 +76,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
|
||||
ignore_inventory_computed_fields,
|
||||
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
|
||||
get_awx_version)
|
||||
from awx.main.utils.ansible import read_ansible_config
|
||||
from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices
|
||||
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
|
||||
from awx.main.utils.reload import stop_local_services
|
||||
@@ -251,6 +255,9 @@ def apply_cluster_membership_policies():
|
||||
# On a differential basis, apply instances to non-isolated groups
|
||||
with transaction.atomic():
|
||||
for g in actual_groups:
|
||||
if g.obj.is_containerized:
|
||||
logger.debug('Skipping containerized group {} for policy calculation'.format(g.obj.name))
|
||||
continue
|
||||
instances_to_add = set(g.instances) - set(g.prior_instances)
|
||||
instances_to_remove = set(g.prior_instances) - set(g.instances)
|
||||
if instances_to_add:
|
||||
@@ -323,7 +330,7 @@ def send_notifications(notification_list, job_id=None):
|
||||
notification.status = "successful"
|
||||
notification.notifications_sent = sent
|
||||
except Exception as e:
|
||||
logger.error("Send Notification Failed {}".format(e))
|
||||
logger.exception("Send Notification Failed {}".format(e))
|
||||
notification.status = "failed"
|
||||
notification.error = smart_str(e)
|
||||
update_fields.append('error')
|
||||
@@ -451,6 +458,25 @@ def cluster_node_heartbeat():
|
||||
logger.exception('Error marking {} as lost'.format(other_inst.hostname))
|
||||
|
||||
|
||||
@task(queue=get_local_queuename)
|
||||
def awx_k8s_reaper():
|
||||
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
|
||||
for group in InstanceGroup.objects.filter(credential__isnull=False).iterator():
|
||||
if group.is_containerized:
|
||||
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
||||
for job in UnifiedJob.objects.filter(
|
||||
pk__in=list(PodManager.list_active_jobs(group))
|
||||
).exclude(status__in=ACTIVE_STATES):
|
||||
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
||||
try:
|
||||
PodManager(job).delete()
|
||||
except Exception:
|
||||
logger.exception("Failed to delete orphaned pod {} from {}".format(
|
||||
job.log_format, group
|
||||
))
|
||||
|
||||
|
||||
|
||||
@task(queue=get_local_queuename)
|
||||
def awx_isolated_heartbeat():
|
||||
local_hostname = settings.CLUSTER_HOST_ID
|
||||
@@ -704,6 +730,7 @@ class BaseTask(object):
|
||||
|
||||
def __init__(self):
|
||||
self.cleanup_paths = []
|
||||
self.parent_workflow_job_id = None
|
||||
|
||||
def update_model(self, pk, _attempt=0, **updates):
|
||||
"""Reload the model instance from the database and update the
|
||||
@@ -876,12 +903,8 @@ class BaseTask(object):
|
||||
show_paths = self.proot_show_paths + local_paths + \
|
||||
settings.AWX_PROOT_SHOW_PATHS
|
||||
|
||||
# Help the user out by including the collections path inside the bubblewrap environment
|
||||
if getattr(settings, 'AWX_ANSIBLE_COLLECTIONS_PATHS', []):
|
||||
show_paths.extend(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
|
||||
|
||||
pi_path = settings.AWX_PROOT_BASE_PATH
|
||||
if not self.instance.is_isolated():
|
||||
if not self.instance.is_isolated() and not self.instance.is_containerized:
|
||||
pi_path = tempfile.mkdtemp(
|
||||
prefix='ansible_runner_pi_',
|
||||
dir=settings.AWX_PROOT_BASE_PATH
|
||||
@@ -908,6 +931,31 @@ class BaseTask(object):
|
||||
process_isolation_params['process_isolation_ro_paths'].append(instance.ansible_virtualenv_path)
|
||||
return process_isolation_params
|
||||
|
||||
def build_params_resource_profiling(self, instance, private_data_dir):
|
||||
resource_profiling_params = {}
|
||||
if self.should_use_resource_profiling(instance):
|
||||
cpu_poll_interval = settings.AWX_RESOURCE_PROFILING_CPU_POLL_INTERVAL
|
||||
mem_poll_interval = settings.AWX_RESOURCE_PROFILING_MEMORY_POLL_INTERVAL
|
||||
pid_poll_interval = settings.AWX_RESOURCE_PROFILING_PID_POLL_INTERVAL
|
||||
|
||||
results_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
|
||||
if not os.path.isdir(results_dir):
|
||||
os.makedirs(results_dir, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
|
||||
|
||||
logger.debug('Collected the following resource profiling intervals: cpu: {} mem: {} pid: {}'
|
||||
.format(cpu_poll_interval, mem_poll_interval, pid_poll_interval))
|
||||
|
||||
resource_profiling_params.update({'resource_profiling': True,
|
||||
'resource_profiling_base_cgroup': 'ansible-runner',
|
||||
'resource_profiling_cpu_poll_interval': cpu_poll_interval,
|
||||
'resource_profiling_memory_poll_interval': mem_poll_interval,
|
||||
'resource_profiling_pid_poll_interval': pid_poll_interval,
|
||||
'resource_profiling_results_dir': results_dir})
|
||||
else:
|
||||
logger.debug('Resource profiling not enabled for task')
|
||||
|
||||
return resource_profiling_params
|
||||
|
||||
def _write_extra_vars_file(self, private_data_dir, vars, safe_dict={}):
|
||||
env_path = os.path.join(private_data_dir, 'env')
|
||||
try:
|
||||
@@ -966,13 +1014,14 @@ class BaseTask(object):
|
||||
if self.should_use_proot(instance):
|
||||
env['PROOT_TMP_DIR'] = settings.AWX_PROOT_BASE_PATH
|
||||
env['AWX_PRIVATE_DATA_DIR'] = private_data_dir
|
||||
|
||||
if 'ANSIBLE_COLLECTIONS_PATHS' in env:
|
||||
env['ANSIBLE_COLLECTIONS_PATHS'] += os.pathsep + os.pathsep.join(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
|
||||
else:
|
||||
env['ANSIBLE_COLLECTIONS_PATHS'] = os.pathsep.join(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
|
||||
return env
|
||||
|
||||
def should_use_resource_profiling(self, job):
|
||||
'''
|
||||
Return whether this task should use resource profiling
|
||||
'''
|
||||
return False
|
||||
|
||||
def should_use_proot(self, instance):
|
||||
'''
|
||||
Return whether this task should use proot.
|
||||
@@ -1057,6 +1106,19 @@ class BaseTask(object):
|
||||
'''
|
||||
Hook for any steps to run after job/task is marked as complete.
|
||||
'''
|
||||
job_profiling_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
|
||||
awx_profiling_dir = '/var/log/tower/playbook_profiling/'
|
||||
if not os.path.exists(awx_profiling_dir):
|
||||
os.mkdir(awx_profiling_dir)
|
||||
if os.path.isdir(job_profiling_dir):
|
||||
shutil.copytree(job_profiling_dir, os.path.join(awx_profiling_dir, str(instance.pk)))
|
||||
|
||||
if instance.is_containerized:
|
||||
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
|
||||
pm = PodManager(instance)
|
||||
logger.debug(f"Deleting pod {pm.pod_name}")
|
||||
pm.delete()
|
||||
|
||||
|
||||
def event_handler(self, event_data):
|
||||
#
|
||||
@@ -1078,6 +1140,8 @@ class BaseTask(object):
|
||||
if event_data.get(self.event_data_key, None):
|
||||
if self.event_data_key != 'job_id':
|
||||
event_data.pop('parent_uuid', None)
|
||||
if self.parent_workflow_job_id:
|
||||
event_data['workflow_job_id'] = self.parent_workflow_job_id
|
||||
should_write_event = False
|
||||
event_data.setdefault(self.event_data_key, self.instance.id)
|
||||
self.dispatcher.dispatch(event_data)
|
||||
@@ -1149,6 +1213,18 @@ class BaseTask(object):
|
||||
'''
|
||||
Run the job/task and capture its output.
|
||||
'''
|
||||
self.instance = self.model.objects.get(pk=pk)
|
||||
containerized = self.instance.is_containerized
|
||||
pod_manager = None
|
||||
if containerized:
|
||||
# Here we are trying to launch a pod before transitioning the job into a running
|
||||
# state. For some scenarios (like waiting for resources to become available) we do this
|
||||
# rather than marking the job as error or failed. This is not always desirable. Cases
|
||||
# such as invalid authentication should surface as an error.
|
||||
pod_manager = self.deploy_container_group_pod(self.instance)
|
||||
if not pod_manager:
|
||||
return
|
||||
|
||||
# self.instance because of the update_model pattern and when it's used in callback handlers
|
||||
self.instance = self.update_model(pk, status='running',
|
||||
start_args='') # blank field to remove encrypted passwords
|
||||
@@ -1167,6 +1243,11 @@ class BaseTask(object):
|
||||
private_data_dir = None
|
||||
isolated_manager_instance = None
|
||||
|
||||
# store a reference to the parent workflow job (if any) so we can include
|
||||
# it in event data JSON
|
||||
if self.instance.spawned_by_workflow:
|
||||
self.parent_workflow_job_id = self.instance.get_workflow_job().id
|
||||
|
||||
try:
|
||||
isolated = self.instance.is_isolated()
|
||||
self.instance.send_notification_templates("running")
|
||||
@@ -1202,6 +1283,8 @@ class BaseTask(object):
|
||||
self.build_extra_vars_file(self.instance, private_data_dir)
|
||||
args = self.build_args(self.instance, private_data_dir, passwords)
|
||||
cwd = self.build_cwd(self.instance, private_data_dir)
|
||||
resource_profiling_params = self.build_params_resource_profiling(self.instance,
|
||||
private_data_dir)
|
||||
process_isolation_params = self.build_params_process_isolation(self.instance,
|
||||
private_data_dir,
|
||||
cwd)
|
||||
@@ -1241,9 +1324,14 @@ class BaseTask(object):
|
||||
'pexpect_timeout': getattr(settings, 'PEXPECT_TIMEOUT', 5),
|
||||
'suppress_ansible_output': True,
|
||||
**process_isolation_params,
|
||||
**resource_profiling_params,
|
||||
},
|
||||
}
|
||||
|
||||
if containerized:
|
||||
# We don't want HOME passed through to container groups.
|
||||
params['envvars'].pop('HOME')
|
||||
|
||||
if isinstance(self.instance, AdHocCommand):
|
||||
params['module'] = self.build_module_name(self.instance)
|
||||
params['module_args'] = self.build_module_args(self.instance)
|
||||
@@ -1262,7 +1350,7 @@ class BaseTask(object):
|
||||
if not params[v]:
|
||||
del params[v]
|
||||
|
||||
if self.instance.is_isolated() is True:
|
||||
if self.instance.is_isolated() or containerized:
|
||||
module_args = None
|
||||
if 'module_args' in params:
|
||||
# if it's adhoc, copy the module args
|
||||
@@ -1273,10 +1361,12 @@ class BaseTask(object):
|
||||
params.pop('inventory'),
|
||||
os.path.join(private_data_dir, 'inventory')
|
||||
)
|
||||
|
||||
ansible_runner.utils.dump_artifacts(params)
|
||||
isolated_manager_instance = isolated_manager.IsolatedManager(
|
||||
cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag,
|
||||
check_callback=self.check_handler,
|
||||
pod_manager=pod_manager
|
||||
)
|
||||
status, rc = isolated_manager_instance.run(self.instance,
|
||||
private_data_dir,
|
||||
@@ -1330,6 +1420,42 @@ class BaseTask(object):
|
||||
raise AwxTaskError.TaskError(self.instance, rc)
|
||||
|
||||
|
||||
def deploy_container_group_pod(self, task):
|
||||
from awx.main.scheduler.kubernetes import PodManager # Avoid circular import
|
||||
pod_manager = PodManager(self.instance)
|
||||
self.cleanup_paths.append(pod_manager.kube_config)
|
||||
try:
|
||||
log_name = task.log_format
|
||||
logger.debug(f"Launching pod for {log_name}.")
|
||||
pod_manager.deploy()
|
||||
except (ApiException, Exception) as exc:
|
||||
if isinstance(exc, ApiException) and exc.status == 403:
|
||||
try:
|
||||
if 'exceeded quota' in json.loads(exc.body)['message']:
|
||||
# If the k8s cluster does not have capacity, we move the
|
||||
# job back into pending and wait until the next run of
|
||||
# the task manager. This does not exactly play well with
|
||||
# our current instance group precendence logic, since it
|
||||
# will just sit here forever if kubernetes returns this
|
||||
# error.
|
||||
logger.warn(exc.body)
|
||||
logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.")
|
||||
self.update_model(task.pk, status='pending')
|
||||
return
|
||||
except Exception:
|
||||
logger.exception(f"Unable to handle response from Kubernetes API for {log_name}.")
|
||||
|
||||
logger.exception(f"Error when launching pod for {log_name}")
|
||||
self.update_model(task.pk, status='error', result_traceback=traceback.format_exc())
|
||||
return
|
||||
|
||||
self.update_model(task.pk, execution_node=pod_manager.pod_name)
|
||||
return pod_manager
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@task()
|
||||
class RunJob(BaseTask):
|
||||
'''
|
||||
@@ -1474,13 +1600,23 @@ class RunJob(BaseTask):
|
||||
if authorize:
|
||||
env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')
|
||||
|
||||
for env_key, folder in (
|
||||
('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'),
|
||||
('ANSIBLE_ROLES_PATH', 'requirements_roles')):
|
||||
paths = []
|
||||
path_vars = (
|
||||
('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
|
||||
('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'))
|
||||
|
||||
config_values = read_ansible_config(job.project.get_project_path(), list(map(lambda x: x[1], path_vars)))
|
||||
|
||||
for env_key, config_setting, folder, default in path_vars:
|
||||
paths = default.split(':')
|
||||
if env_key in env:
|
||||
paths.append(env[env_key])
|
||||
paths.append(os.path.join(private_data_dir, folder))
|
||||
for path in env[env_key].split(':'):
|
||||
if path not in paths:
|
||||
paths = [env[env_key]] + paths
|
||||
elif config_setting in config_values:
|
||||
for path in config_values[config_setting].split(':'):
|
||||
if path not in paths:
|
||||
paths = [config_values[config_setting]] + paths
|
||||
paths = [os.path.join(private_data_dir, folder)] + paths
|
||||
env[env_key] = os.pathsep.join(paths)
|
||||
|
||||
return env
|
||||
@@ -1595,10 +1731,18 @@ class RunJob(BaseTask):
|
||||
d[r'Vault password \({}\):\s*?$'.format(vault_id)] = k
|
||||
return d
|
||||
|
||||
def should_use_resource_profiling(self, job):
|
||||
'''
|
||||
Return whether this task should use resource profiling
|
||||
'''
|
||||
return settings.AWX_RESOURCE_PROFILING_ENABLED
|
||||
|
||||
def should_use_proot(self, job):
|
||||
'''
|
||||
Return whether this task should use proot.
|
||||
'''
|
||||
if job.is_containerized:
|
||||
return False
|
||||
return getattr(settings, 'AWX_PROOT_ENABLED', False)
|
||||
|
||||
def pre_run_hook(self, job, private_data_dir):
|
||||
@@ -1659,6 +1803,7 @@ class RunJob(BaseTask):
|
||||
if job.is_isolated() is True:
|
||||
pu_ig = pu_ig.controller
|
||||
pu_en = settings.CLUSTER_HOST_ID
|
||||
|
||||
sync_metafields = dict(
|
||||
launch_type="sync",
|
||||
job_type='run',
|
||||
@@ -1696,29 +1841,11 @@ class RunJob(BaseTask):
|
||||
# up-to-date with project, job is running project current version
|
||||
if job_revision:
|
||||
job = self.update_model(job.pk, scm_revision=job_revision)
|
||||
|
||||
# copy the project directory
|
||||
runner_project_folder = os.path.join(private_data_dir, 'project')
|
||||
if job.project.scm_type == 'git':
|
||||
git_repo = git.Repo(project_path)
|
||||
if not os.path.exists(runner_project_folder):
|
||||
os.mkdir(runner_project_folder)
|
||||
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
|
||||
# always clone based on specific job revision
|
||||
if not job.scm_revision:
|
||||
raise RuntimeError('Unexpectedly could not determine a revision to run from project.')
|
||||
source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision)
|
||||
# git clone must take file:// syntax for source repo or else options like depth will be ignored
|
||||
source_as_uri = Path(project_path).as_uri()
|
||||
git.Repo.clone_from(
|
||||
source_as_uri, runner_project_folder, branch=source_branch,
|
||||
depth=1, single_branch=True, # shallow, do not copy full history
|
||||
recursive=True # include submodules
|
||||
# Project update does not copy the folder, so copy here
|
||||
RunProjectUpdate.make_local_copy(
|
||||
project_path, os.path.join(private_data_dir, 'project'),
|
||||
job.project.scm_type, job_revision
|
||||
)
|
||||
# force option is necessary because remote refs are not counted, although no information is lost
|
||||
git_repo.delete_head(tmp_branch_name, force=True)
|
||||
else:
|
||||
copy_tree(project_path, runner_project_folder)
|
||||
|
||||
if job.inventory.kind == 'smart':
|
||||
# cache smart inventory memberships so that the host_filter query is not
|
||||
@@ -1737,8 +1864,9 @@ class RunJob(BaseTask):
|
||||
os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'),
|
||||
fact_modification_times,
|
||||
)
|
||||
if isolated_manager_instance:
|
||||
if isolated_manager_instance and not job.is_containerized:
|
||||
isolated_manager_instance.cleanup()
|
||||
|
||||
try:
|
||||
inventory = job.inventory
|
||||
except Inventory.DoesNotExist:
|
||||
@@ -1830,6 +1958,24 @@ class RunProjectUpdate(BaseTask):
|
||||
env['TMP'] = settings.AWX_PROOT_BASE_PATH
|
||||
env['PROJECT_UPDATE_ID'] = str(project_update.pk)
|
||||
env['ANSIBLE_CALLBACK_PLUGINS'] = self.get_path_to('..', 'plugins', 'callback')
|
||||
env['ANSIBLE_GALAXY_IGNORE'] = True
|
||||
# Set up the fallback server, which is the normal Ansible Galaxy by default
|
||||
galaxy_servers = list(settings.FALLBACK_GALAXY_SERVERS)
|
||||
# If private galaxy URL is non-blank, that means this feature is enabled
|
||||
if settings.PRIMARY_GALAXY_URL:
|
||||
galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
|
||||
for key in GALAXY_SERVER_FIELDS:
|
||||
value = getattr(settings, 'PRIMARY_GALAXY_{}'.format(key.upper()))
|
||||
if value:
|
||||
galaxy_servers[0][key] = value
|
||||
for server in galaxy_servers:
|
||||
for key in GALAXY_SERVER_FIELDS:
|
||||
if not server.get(key):
|
||||
continue
|
||||
env_key = ('ANSIBLE_GALAXY_SERVER_{}_{}'.format(server.get('id', 'unnamed'), key)).upper()
|
||||
env[env_key] = server[key]
|
||||
# now set the precedence of galaxy servers
|
||||
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
|
||||
return env
|
||||
|
||||
def _build_scm_url_extra_vars(self, project_update):
|
||||
@@ -1895,8 +2041,8 @@ class RunProjectUpdate(BaseTask):
|
||||
extra_vars.update(extra_vars_new)
|
||||
|
||||
scm_branch = project_update.scm_branch
|
||||
branch_override = bool(project_update.scm_branch != project_update.project.scm_branch)
|
||||
if project_update.job_type == 'run' and scm_branch and (not branch_override):
|
||||
branch_override = bool(scm_branch and project_update.scm_branch != project_update.project.scm_branch)
|
||||
if project_update.job_type == 'run' and (not branch_override):
|
||||
scm_branch = project_update.project.scm_revision
|
||||
elif not scm_branch:
|
||||
scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
|
||||
@@ -2064,15 +2210,51 @@ class RunProjectUpdate(BaseTask):
|
||||
git_repo = git.Repo(project_path)
|
||||
self.original_branch = git_repo.active_branch
|
||||
|
||||
@staticmethod
|
||||
def make_local_copy(project_path, destination_folder, scm_type, scm_revision):
|
||||
if scm_type == 'git':
|
||||
git_repo = git.Repo(project_path)
|
||||
if not os.path.exists(destination_folder):
|
||||
os.mkdir(destination_folder, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
|
||||
tmp_branch_name = 'awx_internal/{}'.format(uuid4())
|
||||
# always clone based on specific job revision
|
||||
if not scm_revision:
|
||||
raise RuntimeError('Unexpectedly could not determine a revision to run from project.')
|
||||
source_branch = git_repo.create_head(tmp_branch_name, scm_revision)
|
||||
# git clone must take file:// syntax for source repo or else options like depth will be ignored
|
||||
source_as_uri = Path(project_path).as_uri()
|
||||
git.Repo.clone_from(
|
||||
source_as_uri, destination_folder, branch=source_branch,
|
||||
depth=1, single_branch=True, # shallow, do not copy full history
|
||||
)
|
||||
# submodules copied in loop because shallow copies from local HEADs are ideal
|
||||
# and no git clone submodule options are compatible with minimum requirements
|
||||
for submodule in git_repo.submodules:
|
||||
subrepo_path = os.path.abspath(os.path.join(project_path, submodule.path))
|
||||
subrepo_destination_folder = os.path.abspath(os.path.join(destination_folder, submodule.path))
|
||||
subrepo_uri = Path(subrepo_path).as_uri()
|
||||
git.Repo.clone_from(subrepo_uri, subrepo_destination_folder, depth=1, single_branch=True)
|
||||
# force option is necessary because remote refs are not counted, although no information is lost
|
||||
git_repo.delete_head(tmp_branch_name, force=True)
|
||||
else:
|
||||
copy_tree(project_path, destination_folder)
|
||||
|
||||
def post_run_hook(self, instance, status):
|
||||
if self.original_branch:
|
||||
# for git project syncs, non-default branches can be problems
|
||||
# restore to branch the repo was on before this run
|
||||
try:
|
||||
self.original_branch.checkout()
|
||||
except Exception:
|
||||
# this could have failed due to dirty tree, but difficult to predict all cases
|
||||
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
|
||||
if self.job_private_data_dir:
|
||||
# copy project folder before resetting to default branch
|
||||
# because some git-tree-specific resources (like submodules) might matter
|
||||
self.make_local_copy(
|
||||
instance.get_project_path(check_if_exists=False), os.path.join(self.job_private_data_dir, 'project'),
|
||||
instance.scm_type, self.playbook_new_revision
|
||||
)
|
||||
if self.original_branch:
|
||||
# for git project syncs, non-default branches can be problems
|
||||
# restore to branch the repo was on before this run
|
||||
try:
|
||||
self.original_branch.checkout()
|
||||
except Exception:
|
||||
# this could have failed due to dirty tree, but difficult to predict all cases
|
||||
logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format))
|
||||
self.release_lock(instance)
|
||||
p = instance.project
|
||||
if self.playbook_new_revision:
|
||||
@@ -2234,7 +2416,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()),])
|
||||
# Add arguments for the source inventory script
|
||||
args.append('--source')
|
||||
args.append(self.psuedo_build_inventory(inventory_update, private_data_dir))
|
||||
args.append(self.pseudo_build_inventory(inventory_update, private_data_dir))
|
||||
if src == 'custom':
|
||||
args.append("--custom")
|
||||
args.append('-v%d' % inventory_update.verbosity)
|
||||
@@ -2245,7 +2427,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
def build_inventory(self, inventory_update, private_data_dir):
|
||||
return None # what runner expects in order to not deal with inventory
|
||||
|
||||
def psuedo_build_inventory(self, inventory_update, private_data_dir):
|
||||
def pseudo_build_inventory(self, inventory_update, private_data_dir):
|
||||
"""Inventory imports are ran through a management command
|
||||
we pass the inventory in args to that command, so this is not considered
|
||||
to be "Ansible" inventory (by runner) even though it is
|
||||
@@ -2518,6 +2700,8 @@ class RunAdHocCommand(BaseTask):
|
||||
'''
|
||||
Return whether this task should use proot.
|
||||
'''
|
||||
if ad_hoc_command.is_containerized:
|
||||
return False
|
||||
return getattr(settings, 'AWX_PROOT_ENABLED', False)
|
||||
|
||||
def final_run_hook(self, adhoc_job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
|
||||
|
||||
@@ -154,12 +154,12 @@ def mk_job_template(name, job_type='run',
|
||||
organization=None, inventory=None,
|
||||
credential=None, network_credential=None,
|
||||
cloud_credential=None, persisted=True, extra_vars='',
|
||||
project=None, spec=None):
|
||||
project=None, spec=None, webhook_service=''):
|
||||
if extra_vars:
|
||||
extra_vars = json.dumps(extra_vars)
|
||||
|
||||
jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars,
|
||||
playbook='helloworld.yml')
|
||||
webhook_service=webhook_service, playbook='helloworld.yml')
|
||||
|
||||
jt.inventory = inventory
|
||||
if jt.inventory is None:
|
||||
@@ -200,11 +200,13 @@ def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
|
||||
return job
|
||||
|
||||
|
||||
def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True):
|
||||
def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True,
|
||||
webhook_service=''):
|
||||
if extra_vars:
|
||||
extra_vars = json.dumps(extra_vars)
|
||||
|
||||
wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization)
|
||||
wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization,
|
||||
webhook_service=webhook_service)
|
||||
|
||||
wfjt.survey_spec = spec
|
||||
if wfjt.survey_spec:
|
||||
|
||||
@@ -197,7 +197,7 @@ def create_survey_spec(variables=None, default_type='integer', required=True, mi
|
||||
#
|
||||
|
||||
|
||||
def create_job_template(name, roles=None, persisted=True, **kwargs):
|
||||
def create_job_template(name, roles=None, persisted=True, webhook_service='', **kwargs):
|
||||
Objects = generate_objects(["job_template", "jobs",
|
||||
"organization",
|
||||
"inventory",
|
||||
@@ -252,11 +252,10 @@ def create_job_template(name, roles=None, persisted=True, **kwargs):
|
||||
else:
|
||||
spec = None
|
||||
|
||||
jt = mk_job_template(name, project=proj,
|
||||
inventory=inv, credential=cred,
|
||||
jt = mk_job_template(name, project=proj, inventory=inv, credential=cred,
|
||||
network_credential=net_cred, cloud_credential=cloud_cred,
|
||||
job_type=job_type, spec=spec, extra_vars=extra_vars,
|
||||
persisted=persisted)
|
||||
persisted=persisted, webhook_service=webhook_service)
|
||||
|
||||
if 'jobs' in kwargs:
|
||||
for i in kwargs['jobs']:
|
||||
@@ -401,7 +400,7 @@ def generate_workflow_job_template_nodes(workflow_job_template,
|
||||
|
||||
|
||||
# TODO: Implement survey and jobs
|
||||
def create_workflow_job_template(name, organization=None, persisted=True, **kwargs):
|
||||
def create_workflow_job_template(name, organization=None, persisted=True, webhook_service='', **kwargs):
|
||||
Objects = generate_objects(["workflow_job_template",
|
||||
"workflow_job_template_nodes",
|
||||
"survey",], kwargs)
|
||||
@@ -418,7 +417,8 @@ def create_workflow_job_template(name, organization=None, persisted=True, **kwar
|
||||
organization=organization,
|
||||
spec=spec,
|
||||
extra_vars=extra_vars,
|
||||
persisted=persisted)
|
||||
persisted=persisted,
|
||||
webhook_service=webhook_service)
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -496,9 +496,6 @@ def test_falsey_field_data(get, post, organization, admin, field_value):
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('kind, extraneous', [
|
||||
['ssh', 'ssh_key_unlock'],
|
||||
['scm', 'ssh_key_unlock'],
|
||||
['net', 'ssh_key_unlock'],
|
||||
['net', 'authorize_password'],
|
||||
])
|
||||
def test_field_dependencies(get, post, organization, admin, kind, extraneous):
|
||||
|
||||
@@ -127,3 +127,53 @@ def test_post_wfjt_running_notification(get, post, admin, notification_template,
|
||||
response = get(url, admin)
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_search_on_notification_configuration_is_prevented(get, admin):
|
||||
url = reverse('api:notification_template_list')
|
||||
response = get(url, {'notification_configuration__regex': 'ABCDEF'}, admin)
|
||||
assert response.status_code == 403
|
||||
assert response.data == {"detail": "Filtering on notification_configuration is not allowed."}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_wfjt_approval_notification(get, admin, workflow_job_template):
|
||||
url = reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': workflow_job_template.pk})
|
||||
response = get(url, admin)
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_post_wfjt_approval_notification(get, post, admin, notification_template, workflow_job_template):
|
||||
url = reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': workflow_job_template.pk})
|
||||
response = post(url,
|
||||
dict(id=notification_template.id,
|
||||
associate=True),
|
||||
admin)
|
||||
assert response.status_code == 204
|
||||
response = get(url, admin)
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_org_approval_notification(get, admin, organization):
|
||||
url = reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': organization.pk})
|
||||
response = get(url, admin)
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_post_org_approval_notification(get, post, admin, notification_template, organization):
|
||||
url = reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': organization.pk})
|
||||
response = post(url,
|
||||
dict(id=notification_template.id,
|
||||
associate=True),
|
||||
admin)
|
||||
assert response.status_code == 204
|
||||
response = get(url, admin)
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 1
|
||||
|
||||
260
awx/main/tests/functional/api/test_webhooks.py
Normal file
260
awx/main/tests/functional/api/test_webhooks.py
Normal file
@@ -0,0 +1,260 @@
|
||||
import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models.mixins import WebhookTemplateMixin
|
||||
from awx.main.models.credential import Credential, CredentialType
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"user_role, expect", [
|
||||
('superuser', 200),
|
||||
('org admin', 200),
|
||||
('jt admin', 200),
|
||||
('jt execute', 403),
|
||||
('org member', 403),
|
||||
]
|
||||
)
|
||||
def test_get_webhook_key_jt(organization_factory, job_template_factory, get, user_role, expect):
|
||||
objs = organization_factory("org", superusers=['admin'], users=['user'])
|
||||
jt = job_template_factory("jt", organization=objs.organization,
|
||||
inventory='test_inv', project='test_proj').job_template
|
||||
if user_role == 'superuser':
|
||||
user = objs.superusers.admin
|
||||
else:
|
||||
user = objs.users.user
|
||||
grant_obj = objs.organization if user_role.startswith('org') else jt
|
||||
getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)
|
||||
|
||||
url = reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': jt.pk})
|
||||
response = get(url, user=user, expect=expect)
|
||||
if expect < 400:
|
||||
assert response.data == {'webhook_key': ''}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"user_role, expect", [
|
||||
('superuser', 200),
|
||||
('org admin', 200),
|
||||
('jt admin', 200),
|
||||
('jt execute', 403),
|
||||
('org member', 403),
|
||||
]
|
||||
)
|
||||
def test_get_webhook_key_wfjt(organization_factory, workflow_job_template_factory, get, user_role, expect):
|
||||
objs = organization_factory("org", superusers=['admin'], users=['user'])
|
||||
wfjt = workflow_job_template_factory("wfjt", organization=objs.organization).workflow_job_template
|
||||
if user_role == 'superuser':
|
||||
user = objs.superusers.admin
|
||||
else:
|
||||
user = objs.users.user
|
||||
grant_obj = objs.organization if user_role.startswith('org') else wfjt
|
||||
getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)
|
||||
|
||||
url = reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': wfjt.pk})
|
||||
response = get(url, user=user, expect=expect)
|
||||
if expect < 400:
|
||||
assert response.data == {'webhook_key': ''}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"user_role, expect", [
|
||||
('superuser', 201),
|
||||
('org admin', 201),
|
||||
('jt admin', 201),
|
||||
('jt execute', 403),
|
||||
('org member', 403),
|
||||
]
|
||||
)
|
||||
def test_post_webhook_key_jt(organization_factory, job_template_factory, post, user_role, expect):
|
||||
objs = organization_factory("org", superusers=['admin'], users=['user'])
|
||||
jt = job_template_factory("jt", organization=objs.organization,
|
||||
inventory='test_inv', project='test_proj').job_template
|
||||
if user_role == 'superuser':
|
||||
user = objs.superusers.admin
|
||||
else:
|
||||
user = objs.users.user
|
||||
grant_obj = objs.organization if user_role.startswith('org') else jt
|
||||
getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)
|
||||
|
||||
url = reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': jt.pk})
|
||||
response = post(url, {}, user=user, expect=expect)
|
||||
if expect < 400:
|
||||
assert bool(response.data.get('webhook_key'))
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "user_role, expect", [
        ('superuser', 201),
        ('org admin', 201),
        ('jt admin', 201),
        ('jt execute', 403),
        ('org member', 403),
    ]
)
def test_post_webhook_key_wfjt(organization_factory, workflow_job_template_factory, post, user_role, expect):
    """Same permission matrix as the JT variant, but for workflow job templates."""
    objs = organization_factory("org", superusers=['admin'], users=['user'])
    wfjt = workflow_job_template_factory("wfjt", organization=objs.organization).workflow_job_template

    if user_role == 'superuser':
        user = objs.superusers.admin
    else:
        # Grant the role under test on either the organization or the WFJT itself.
        user = objs.users.user
        grant_obj = objs.organization if user_role.startswith('org') else wfjt
        getattr(grant_obj, '{}_role'.format(user_role.split()[1])).members.add(user)

    url = reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': wfjt.pk})
    response = post(url, {}, user=user, expect=expect)
    if expect < 400:
        # A successful rotation returns a non-empty key.
        assert bool(response.data.get('webhook_key'))
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_service(organization_factory, job_template_factory, patch, service):
    """Selecting a webhook service on a JT auto-generates a webhook key."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    # A fresh JT has neither a service nor a key.
    assert (jt.webhook_service, jt.webhook_key) == ('', '')

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_service': service}, user=admin, expect=200)
    jt.refresh_from_db()

    assert jt.webhook_service == service
    # The key is generated server-side as a side effect of setting the service.
    assert jt.webhook_key != ''
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_unset_webhook_service(organization_factory, job_template_factory, patch, service):
    """Clearing the webhook service also wipes the generated webhook key."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    # Factory-created JT starts with the service set and a key present.
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_service': ''}, user=admin, expect=200)
    jt.refresh_from_db()

    # Both fields are reset together.
    assert (jt.webhook_service, jt.webhook_key) == ('', '')
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_credential(organization_factory, job_template_factory, patch, service):
    """A token credential matching the selected webhook service is accepted."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    # Build a token credential of the *same* service type as the JT's webhook.
    token_type = CredentialType.defaults['{}_token'.format(service)]()
    token_type.save()
    cred = Credential.objects.create(credential_type=token_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    patch(url, {'webhook_credential': cred.pk}, user=admin, expect=200)
    jt.refresh_from_db()

    # Service and key are untouched; the credential is attached.
    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential == cred
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service,token", [
        # Pair every service with the *previous* service's token type
        # (wraps around at index 0), guaranteeing a mismatch.
        (s, WebhookTemplateMixin.SERVICES[i - 1][0]) for i, (s, _) in enumerate(WebhookTemplateMixin.SERVICES)
    ]
)
def test_set_wrong_service_webhook_credential(organization_factory, job_template_factory, patch, service, token):
    """A token credential for a *different* service than the JT's webhook is rejected."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    # Credential type intentionally mismatched against the JT's service.
    token_type = CredentialType.defaults['{}_token'.format(token)]()
    token_type.save()
    cred = Credential.objects.create(credential_type=token_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
    jt.refresh_from_db()

    # Nothing changed, and the API explains why.
    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential is None
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_set_webhook_credential_without_service(organization_factory, job_template_factory, patch, service):
    """A webhook credential cannot be attached while no webhook service is selected."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    # No service configured on this JT.
    assert jt.webhook_service == ''
    assert jt.webhook_key == ''

    token_type = CredentialType.defaults['{}_token'.format(service)]()
    token_type.save()
    cred = Credential.objects.create(credential_type=token_type, name='test-cred',
                                     inputs={'token': 'secret'})

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
    jt.refresh_from_db()

    # Request rejected; JT untouched.
    assert jt.webhook_service == ''
    assert jt.webhook_key == ''
    assert jt.webhook_credential is None
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize(
    "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
)
def test_unset_webhook_service_with_credential(organization_factory, job_template_factory, patch, service):
    """The webhook service cannot be cleared while a webhook credential is still attached."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
                              inventory='test_inv', project='test_proj').job_template
    admin = objs.superusers.admin
    assert jt.webhook_service == service
    assert jt.webhook_key != ''

    # Attach a matching token credential directly on the model.
    token_type = CredentialType.defaults['{}_token'.format(service)]()
    token_type.save()
    cred = Credential.objects.create(credential_type=token_type, name='test-cred',
                                     inputs={'token': 'secret'})
    jt.webhook_credential = cred
    jt.save()

    url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
    response = patch(url, {'webhook_service': ''}, user=admin, expect=400)
    jt.refresh_from_db()

    # Everything stays in place; the API reports the conflict.
    assert jt.webhook_service == service
    assert jt.webhook_key != ''
    assert jt.webhook_credential == cred
    assert response.data == {'webhook_credential': ["Must match the selected webhook service."]}
|
||||
@@ -203,6 +203,13 @@ def organization(instance):
|
||||
return Organization.objects.create(name="test-org", description="test-org-desc")
|
||||
|
||||
|
||||
@pytest.fixture
def credentialtype_kube():
    """Materialize (save) the managed kubernetes_bearer_token credential type."""
    cred_type = CredentialType.defaults['kubernetes_bearer_token']()
    cred_type.save()
    return cred_type
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def credentialtype_ssh():
|
||||
ssh = CredentialType.defaults['ssh']()
|
||||
@@ -336,6 +343,12 @@ def other_external_credential(credentialtype_external):
|
||||
inputs={'url': 'http://testhost.com', 'token': 'secret2'})
|
||||
|
||||
|
||||
@pytest.fixture
def kube_credential(credentialtype_kube):
    """A Kubernetes bearer-token credential pointed at a dummy cluster (SSL verify off)."""
    return Credential.objects.create(
        credential_type=credentialtype_kube,
        name='kube-cred',
        inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False},
    )
|
||||
|
||||
|
||||
@pytest.fixture
def inventory(organization):
    """An inventory owned by the standard test organization."""
    return organization.inventories.create(name="test-inv")
|
||||
|
||||
@@ -147,6 +147,39 @@ class TestMetaVars:
|
||||
assert data['awx_schedule_id'] == schedule.pk
|
||||
assert 'awx_user_name' not in data
|
||||
|
||||
def test_scheduled_workflow_job_node_metavars(self, workflow_job_template):
    """A job spawned inside a scheduled workflow exposes the parent schedule
    through the awx_/tower_parent_job_schedule_* metavars.
    """
    schedule = Schedule.objects.create(
        name='job-schedule',
        rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY',
        unified_job_template=workflow_job_template
    )

    workflow_job = WorkflowJob.objects.create(
        name='workflow-job',
        workflow_job_template=workflow_job_template,
        schedule=schedule
    )

    # Attach a plain job to the workflow via a workflow node.
    job = Job.objects.create(launch_type='workflow')
    workflow_job.workflow_nodes.create(job=job)

    assert job.awx_meta_vars() == {
        'awx_job_id': job.id,
        'tower_job_id': job.id,
        'awx_job_launch_type': 'workflow',
        'tower_job_launch_type': 'workflow',
        'awx_workflow_job_name': 'workflow-job',
        'tower_workflow_job_name': 'workflow-job',
        'awx_workflow_job_id': workflow_job.id,
        'tower_workflow_job_id': workflow_job.id,
        'awx_parent_job_schedule_id': schedule.id,
        'tower_parent_job_schedule_id': schedule.id,
        'awx_parent_job_schedule_name': 'job-schedule',
        'tower_parent_job_schedule_name': 'job-schedule',
    }
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_event_processing_not_finished():
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from unittest import mock
|
||||
import json
|
||||
|
||||
# AWX
|
||||
from awx.main.models.workflow import (
|
||||
@@ -248,7 +250,6 @@ class TestWorkflowJobTemplate:
|
||||
test_view = WorkflowJobTemplateNodeSuccessNodesList()
|
||||
nodes = wfjt.workflow_job_template_nodes.all()
|
||||
# test cycle validation
|
||||
print(nodes[0].success_nodes.get(id=nodes[1].id).failure_nodes.get(id=nodes[2].id))
|
||||
assert test_view.is_valid_relation(nodes[2], nodes[0]) == {'Error': 'Cycle detected.'}
|
||||
|
||||
def test_always_success_failure_creation(self, wfjt, admin, get):
|
||||
@@ -270,6 +271,103 @@ class TestWorkflowJobTemplate:
|
||||
wfjt2.validate_unique()
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestWorkflowJobTemplatePrompts:
    """Tests for prompts that live on the workflow job template model itself
    (not on individual nodes); these prompts apply to the entire workflow.
    """

    @pytest.fixture
    def wfjt_prompts(self):
        # WFJT with every launch-time prompt enabled.
        return WorkflowJobTemplate.objects.create(
            ask_inventory_on_launch=True,
            ask_variables_on_launch=True,
            ask_limit_on_launch=True,
            ask_scm_branch_on_launch=True
        )

    @pytest.fixture
    def prompts_data(self, inventory):
        # One concrete value for each promptable field.
        return dict(
            inventory=inventory,
            extra_vars={'foo': 'bar'},
            limit='webservers',
            scm_branch='release-3.3'
        )

    def test_apply_workflow_job_prompts(self, workflow_job_template, wfjt_prompts, prompts_data, inventory):
        # With nothing provided, prompt-backed fields stay null.
        workflow_job = workflow_job_template.create_unified_job()
        assert workflow_job.limit is None
        assert workflow_job.inventory is None
        assert workflow_job.scm_branch is None

        # Values supplied at launch are applied to the spawned workflow job.
        workflow_job = workflow_job_template.create_unified_job(**prompts_data)
        assert json.loads(workflow_job.extra_vars) == {'foo': 'bar'}
        assert workflow_job.limit == 'webservers'
        assert workflow_job.inventory == inventory
        assert workflow_job.scm_branch == 'release-3.3'

        # Non-null values set on the WFJT itself are used when no prompt is given.
        workflow_job_template.inventory = inventory
        workflow_job_template.limit = 'fooo'
        workflow_job_template.scm_branch = 'bar'
        workflow_job = workflow_job_template.create_unified_job()
        assert workflow_job.limit == 'fooo'
        assert workflow_job.inventory == inventory
        assert workflow_job.scm_branch == 'bar'

    @pytest.mark.django_db
    def test_process_workflow_job_prompts(self, inventory, workflow_job_template, wfjt_prompts, prompts_data):
        # A WFJT without ask_*_on_launch flags rejects every prompt...
        accepted, rejected, errors = workflow_job_template._accept_or_ignore_job_kwargs(**prompts_data)
        assert accepted == {}
        assert rejected == prompts_data
        assert errors
        # ...while one with all prompts enabled accepts them all.
        accepted, rejected, errors = wfjt_prompts._accept_or_ignore_job_kwargs(**prompts_data)
        assert accepted == prompts_data
        assert rejected == {}
        assert not errors

    @pytest.mark.django_db
    def test_set_all_the_prompts(self, post, organization, inventory, org_admin):
        # Create a WFJT with limit/scm_branch defaults and both prompts enabled.
        r = post(
            url=reverse('api:workflow_job_template_list'),
            data=dict(
                name='My new workflow',
                organization=organization.id,
                inventory=inventory.id,
                limit='foooo',
                ask_limit_on_launch=True,
                scm_branch='bar',
                ask_scm_branch_on_launch=True
            ),
            user=org_admin,
            expect=201
        )
        wfjt = WorkflowJobTemplate.objects.get(id=r.data['id'])
        assert wfjt.char_prompts == {
            'limit': 'foooo', 'scm_branch': 'bar'
        }
        assert wfjt.ask_scm_branch_on_launch is True
        assert wfjt.ask_limit_on_launch is True

        # Launch with prompt overrides; dispatch is stubbed so nothing runs.
        launch_url = r.data['related']['launch']
        with mock.patch('awx.main.queue.CallbackQueueDispatcher.dispatch', lambda self, obj: None):
            r = post(
                url=launch_url,
                data=dict(
                    scm_branch='prompt_branch',
                    limit='prompt_limit'
                ),
                user=org_admin,
                expect=201
            )
            assert r.data['limit'] == 'prompt_limit'
            assert r.data['scm_branch'] == 'prompt_branch'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_workflow_ancestors(organization):
|
||||
# Spawn order of templates grandparent -> parent -> child
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
import subprocess
|
||||
import yaml
|
||||
import base64
|
||||
|
||||
from unittest import mock # noqa
|
||||
import pytest
|
||||
|
||||
from awx.main.scheduler.kubernetes import PodManager
|
||||
from awx.main.utils import (
|
||||
create_temporary_fifo,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def containerized_job(default_instance_group, kube_credential, job_template_factory):
    """A pending job assigned to an instance group backed by a Kubernetes credential."""
    # Turn the default instance group into a container group.
    default_instance_group.credential = kube_credential
    default_instance_group.save()

    objects = job_template_factory('jt', organization='org1', project='proj',
                                   inventory='inv', credential='cred',
                                   jobs=['my_job'])
    job_template = objects.job_template
    job_template.instance_groups.add(default_instance_group)

    job = objects.jobs['my_job']
    job.instance_group = default_instance_group
    job.status = 'pending'
    job.save()
    return job
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_containerized_job(containerized_job):
    """A job in a kube-credentialed instance group reports as containerized at every level."""
    assert containerized_job.is_containerized
    assert containerized_job.instance_group.is_containerized
    assert containerized_job.instance_group.credential.kubernetes
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_kubectl_ssl_verification(containerized_job):
    """With verify_ssl on, the CA cert must appear base64-encoded in the generated kube config."""
    cred = containerized_job.instance_group.credential
    cred.inputs['verify_ssl'] = True

    # Generate a throwaway RSA key, exposed through a fifo so openssl can read it.
    key_material = subprocess.run('openssl genrsa 2> /dev/null',
                                  shell=True, check=True,
                                  stdout=subprocess.PIPE)
    key = create_temporary_fifo(key_material.stdout)

    # Self-sign a certificate with that key.
    cmd = f"""
openssl req -x509 -sha256 -new -nodes \
-key {key} -subj '/C=US/ST=North Carolina/L=Durham/O=Ansible/OU=AWX Development/CN=awx.localhost'
"""
    cert = subprocess.run(cmd.strip(), shell=True, check=True, stdout=subprocess.PIPE)
    cred.inputs['ssl_ca_cert'] = cert.stdout
    cred.save()

    pm = PodManager(containerized_job)
    config = yaml.load(open(pm.kube_config), Loader=yaml.FullLoader)
    ca_data = config['clusters'][0]['cluster']['certificate-authority-data']
    # Round-trip: decoding the config field yields the original PEM bytes.
    assert base64.b64decode(ca_data.encode()) == cert.stdout
|
||||
@@ -82,9 +82,12 @@ def test_default_cred_types():
|
||||
'cloudforms',
|
||||
'conjur',
|
||||
'gce',
|
||||
'github_token',
|
||||
'gitlab_token',
|
||||
'hashivault_kv',
|
||||
'hashivault_ssh',
|
||||
'insights',
|
||||
'kubernetes_bearer_token',
|
||||
'net',
|
||||
'openstack',
|
||||
'rhv',
|
||||
@@ -137,7 +140,6 @@ def test_credential_creation(organization_factory):
|
||||
[PKCS8_PRIVATE_KEY, None, True], # unencrypted PKCS8 key, no unlock pass
|
||||
[PKCS8_PRIVATE_KEY, 'passme', False], # unencrypted PKCS8 key, unlock pass
|
||||
[None, None, True], # no key, no unlock pass
|
||||
[None, 'super-secret', False], # no key, unlock pass
|
||||
['INVALID-KEY-DATA', None, False], # invalid key data
|
||||
[EXAMPLE_PRIVATE_KEY.replace('=', '\u003d'), None, True], # automatically fix JSON-encoded GCE keys
|
||||
])
|
||||
|
||||
@@ -262,7 +262,6 @@ def test_inventory_update_injected_content(this_kind, script_or_plugin, inventor
|
||||
"""
|
||||
private_data_dir = envvars.pop('AWX_PRIVATE_DATA_DIR')
|
||||
assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == ('auto' if use_plugin else 'script')
|
||||
assert envvars.pop('ANSIBLE_COLLECTIONS_PATHS') == os.pathsep.join(settings.AWX_ANSIBLE_COLLECTIONS_PATHS)
|
||||
set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0'])
|
||||
env, content = read_content(private_data_dir, envvars, inventory_update)
|
||||
base_dir = os.path.join(DATA, script_or_plugin)
|
||||
|
||||
@@ -2,7 +2,7 @@ import pytest
|
||||
from unittest import mock
|
||||
import json
|
||||
|
||||
from awx.main.models import Job, Instance
|
||||
from awx.main.models import Job, Instance, JobHostSummary
|
||||
from awx.main.tasks import cluster_node_heartbeat
|
||||
from django.test.utils import override_settings
|
||||
|
||||
@@ -47,6 +47,24 @@ def test_job_notification_data(inventory, machine_credential, project):
|
||||
assert json.loads(notification_data['extra_vars'])['SSN'] == encrypted_str
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_job_notification_host_data(inventory, machine_credential, project, job_template, host):
    """Per-host summary counts must be reflected in the job's notification payload."""
    job = Job.objects.create(
        job_template=job_template, inventory=inventory, name='hi world', project=project
    )
    JobHostSummary.objects.create(job=job, host=host, changed=1, dark=2, failures=3, ok=4, processed=3, skipped=2, rescued=1, ignored=0)

    expected_summary = {
        'failed': True,
        'changed': 1,
        'dark': 2,
        'failures': 3,
        'ok': 4,
        'processed': 3,
        'skipped': 2,
        'rescued': 1,
        'ignored': 0,
    }
    assert job.notification_data()['hosts'] == {'single-host': expected_summary}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestLaunchConfig:
|
||||
|
||||
|
||||
@@ -150,3 +150,24 @@ def test_org_admin_edit_sys_auditor(org_admin, alice, organization):
|
||||
organization.member_role.members.add(alice)
|
||||
access = UserAccess(org_admin)
|
||||
assert not access.can_change(obj=alice, data=dict(is_system_auditor='true'))
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_org_admin_can_delete_orphan(org_admin, alice):
    """A user belonging to no organization may be deleted by any org admin."""
    assert UserAccess(org_admin).can_delete(alice)
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_org_admin_can_delete_group_member(org_admin, org_member):
    """An org admin may delete a member of their own organization."""
    assert UserAccess(org_admin).can_delete(org_member)
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_org_admin_cannot_delete_member_attached_to_other_group(org_admin, org_member):
    """Membership in an org the admin does not administer blocks deletion."""
    other_org = Organization.objects.create(name="other-org", description="other-org-desc")
    access = UserAccess(org_admin)
    # Attach the member to a second org outside the admin's control.
    other_org.member_role.members.add(org_member)
    assert not access.can_delete(org_member)
|
||||
|
||||
@@ -29,6 +29,7 @@ def job_template(mocker):
|
||||
mock_jt.pk = 5
|
||||
mock_jt.host_config_key = '9283920492'
|
||||
mock_jt.validation_errors = mock_JT_resource_data
|
||||
mock_jt.webhook_service = ''
|
||||
return mock_jt
|
||||
|
||||
|
||||
@@ -50,6 +51,7 @@ class TestJobTemplateSerializerGetRelated():
|
||||
'schedules',
|
||||
'activity_stream',
|
||||
'launch',
|
||||
'webhook_key',
|
||||
'notification_templates_started',
|
||||
'notification_templates_success',
|
||||
'notification_templates_error',
|
||||
|
||||
@@ -32,6 +32,7 @@ class TestWorkflowJobTemplateSerializerGetRelated():
|
||||
'workflow_jobs',
|
||||
'launch',
|
||||
'workflow_nodes',
|
||||
'webhook_key',
|
||||
])
|
||||
def test_get_related(self, mocker, test_get_related, workflow_job_template, related_resource_name):
|
||||
test_get_related(WorkflowJobTemplateSerializer,
|
||||
|
||||
@@ -3,7 +3,7 @@ import pytest
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from awx.main.models import Inventory, CredentialType, Credential, Project
|
||||
from awx.main.models.workflow import (
|
||||
WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobOptions,
|
||||
WorkflowJobTemplate, WorkflowJobTemplateNode,
|
||||
WorkflowJob, WorkflowJobNode
|
||||
)
|
||||
from unittest import mock
|
||||
@@ -33,11 +33,11 @@ class TestWorkflowJobInheritNodesMixin():
|
||||
def test__create_workflow_job_nodes(self, mocker, job_template_nodes):
    """_create_workflow_nodes should spawn one workflow job node per template
    node, each bound to the spawning WorkflowJob instance.

    The diff residue in this block merged the pre-change version (calling the
    method on a bare WorkflowJobOptions mixin) with the post-change one; only
    the WorkflowJob-based version is kept — running both would double-call the
    mock and assert against an object the method is no longer invoked on.
    """
    workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobTemplateNode.create_workflow_job_node')

    workflow_job = WorkflowJob()
    workflow_job._create_workflow_nodes(job_template_nodes)

    # Every template node must have produced a node tied to this workflow job.
    for job_template_node in job_template_nodes:
        workflow_job_node_create.assert_any_call(workflow_job=workflow_job)
|
||||
|
||||
class TestMapWorkflowJobNodes():
|
||||
@pytest.fixture
|
||||
@@ -236,4 +236,4 @@ class TestWorkflowJobNodeJobKWARGS:
|
||||
|
||||
|
||||
def test_get_ask_mapping_integrity():
    """The WFJT promptable-field mapping must cover the full, current set of
    prompts — including the limit and scm_branch prompts added alongside
    ask_limit_on_launch / ask_scm_branch_on_launch.

    The diff residue here carried both the stale expectation (without
    'limit'/'scm_branch') and the updated one; only the updated assert is kept.
    """
    assert list(WorkflowJobTemplate.get_ask_mapping().keys()) == ['extra_vars', 'inventory', 'limit', 'scm_branch']
|
||||
|
||||
55
awx/main/tests/unit/scheduler/test_kubernetes.py
Normal file
55
awx/main/tests/unit/scheduler/test_kubernetes.py
Normal file
@@ -0,0 +1,55 @@
|
||||
import pytest
|
||||
from unittest import mock
|
||||
from django.conf import settings
|
||||
|
||||
from awx.main.models import (
|
||||
InstanceGroup,
|
||||
Job,
|
||||
JobTemplate,
|
||||
Project,
|
||||
Inventory,
|
||||
)
|
||||
from awx.main.scheduler.kubernetes import PodManager
|
||||
|
||||
|
||||
@pytest.fixture
def container_group():
    """Mock wrapping a real InstanceGroup so attribute access stays realistic."""
    return mock.Mock(InstanceGroup(name='container-group'))
|
||||
|
||||
|
||||
@pytest.fixture
def job(container_group):
    """Unsaved Job wired to the mocked container group (no DB access needed)."""
    return Job(
        pk=1,
        id=1,
        project=Project(),
        instance_group=container_group,
        inventory=Inventory(),
        job_template=JobTemplate(id=1, name='foo'),
    )
|
||||
|
||||
|
||||
def test_default_pod_spec(job):
    """Without an override, the pod uses the globally configured default image."""
    pod = PodManager(job).pod_definition
    assert pod['spec']['containers'][0]['image'] == settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE
|
||||
|
||||
|
||||
def test_custom_pod_spec(job):
    """A pod_spec_override on the instance group replaces the container image.

    NOTE(review): the original YAML string's internal indentation was lost in
    this capture; reconstructed as standard two-space YAML — confirm against
    upstream.
    """
    job.instance_group.pod_spec_override = """
spec:
  containers:
    - image: my-custom-image
"""
    image = PodManager(job).pod_definition['spec']['containers'][0]['image']
    assert image == 'my-custom-image'
|
||||
|
||||
|
||||
def test_pod_manager_namespace_property(job):
    """The namespace defaults globally but can be set via pod_spec_override metadata.

    NOTE(review): the original YAML string's internal indentation was lost in
    this capture; reconstructed as standard two-space YAML — confirm against
    upstream.
    """
    assert PodManager(job).namespace == settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE

    job.instance_group.pod_spec_override = """
metadata:
  namespace: my-namespace
"""
    assert PodManager(job).namespace == 'my-namespace'
|
||||
@@ -11,6 +11,7 @@ class FakeObject(object):
|
||||
|
||||
class Job(FakeObject):
|
||||
task_impact = 43
|
||||
is_containerized = False
|
||||
|
||||
def log_format(self):
|
||||
return 'job 382 (fake)'
|
||||
|
||||
@@ -130,6 +130,8 @@ def test_send_notifications_list(mock_notifications_filter, mock_job_get, mocker
|
||||
('VMWARE_PASSWORD', 'SECRET'),
|
||||
('API_SECRET', 'SECRET'),
|
||||
('CALLBACK_CONNECTION', 'amqp://tower:password@localhost:5672/tower'),
|
||||
('ANSIBLE_GALAXY_SERVER_PRIMARY_GALAXY_PASSWORD', 'SECRET'),
|
||||
('ANSIBLE_GALAXY_SERVER_PRIMARY_GALAXY_TOKEN', 'SECRET'),
|
||||
])
|
||||
def test_safe_env_filtering(key, value):
|
||||
assert build_safe_env({key: value})[key] == tasks.HIDDEN_PASSWORD
|
||||
@@ -366,6 +368,7 @@ class TestGenericRun():
|
||||
|
||||
task = tasks.RunJob()
|
||||
task.update_model = mock.Mock(return_value=job)
|
||||
task.model.objects.get = mock.Mock(return_value=job)
|
||||
task.build_private_data_files = mock.Mock(side_effect=OSError())
|
||||
|
||||
with mock.patch('awx.main.tasks.copy_tree'):
|
||||
@@ -385,6 +388,7 @@ class TestGenericRun():
|
||||
|
||||
task = tasks.RunJob()
|
||||
task.update_model = mock.Mock(wraps=update_model_wrapper)
|
||||
task.model.objects.get = mock.Mock(return_value=job)
|
||||
task.build_private_data_files = mock.Mock()
|
||||
|
||||
with mock.patch('awx.main.tasks.copy_tree'):
|
||||
@@ -444,7 +448,6 @@ class TestGenericRun():
|
||||
settings.AWX_PROOT_HIDE_PATHS = ['/AWX_PROOT_HIDE_PATHS1', '/AWX_PROOT_HIDE_PATHS2']
|
||||
settings.ANSIBLE_VENV_PATH = '/ANSIBLE_VENV_PATH'
|
||||
settings.AWX_VENV_PATH = '/AWX_VENV_PATH'
|
||||
settings.AWX_ANSIBLE_COLLECTIONS_PATHS = ['/AWX_COLLECTION_PATH1', '/AWX_COLLECTION_PATH2']
|
||||
|
||||
process_isolation_params = task.build_params_process_isolation(job, private_data_dir, cwd)
|
||||
assert True is process_isolation_params['process_isolation']
|
||||
@@ -454,10 +457,6 @@ class TestGenericRun():
|
||||
"The per-job private data dir should be in the list of directories the user can see."
|
||||
assert cwd in process_isolation_params['process_isolation_show_paths'], \
|
||||
"The current working directory should be in the list of directories the user can see."
|
||||
assert '/AWX_COLLECTION_PATH1' in process_isolation_params['process_isolation_show_paths'], \
|
||||
"AWX global collection directory 1 of 2 should get added to the list of directories the user can see."
|
||||
assert '/AWX_COLLECTION_PATH2' in process_isolation_params['process_isolation_show_paths'], \
|
||||
"AWX global collection directory 2 of 2 should get added to the list of directories the user can see."
|
||||
|
||||
for p in [settings.AWX_PROOT_BASE_PATH,
|
||||
'/etc/tower',
|
||||
@@ -474,6 +473,36 @@ class TestGenericRun():
|
||||
assert '/AWX_VENV_PATH' in process_isolation_params['process_isolation_ro_paths']
|
||||
assert 2 == len(process_isolation_params['process_isolation_ro_paths'])
|
||||
|
||||
|
||||
@mock.patch('os.makedirs')
def test_build_params_resource_profiling(self, os_makedirs):
    """Resource-profiling params must point ansible-runner at the expected
    cgroup and artifacts dir, polling every 0.25s.
    """
    job = Job(project=Project(), inventory=Inventory())
    task = tasks.RunJob()
    # Force profiling on regardless of settings.
    task.should_use_resource_profiling = lambda job: True
    task.instance = job

    params = task.build_params_resource_profiling(task.instance, '/fake_private_data_dir')
    assert params['resource_profiling'] is True
    assert params['resource_profiling_base_cgroup'] == 'ansible-runner'
    assert params['resource_profiling_cpu_poll_interval'] == '0.25'
    assert params['resource_profiling_memory_poll_interval'] == '0.25'
    assert params['resource_profiling_pid_poll_interval'] == '0.25'
    assert params['resource_profiling_results_dir'] == '/fake_private_data_dir/artifacts/playbook_profiling'
|
||||
|
||||
|
||||
@pytest.mark.parametrize("scenario, profiling_enabled", [
    ('global_setting', True),
    ('default', False)])
def test_should_use_resource_profiling(self, scenario, profiling_enabled, settings):
    """Resource profiling is opt-in via the AWX_RESOURCE_PROFILING_ENABLED setting."""
    job = Job(project=Project(), inventory=Inventory())
    task = tasks.RunJob()
    task.instance = job

    if scenario == 'global_setting':
        settings.AWX_RESOURCE_PROFILING_ENABLED = True

    assert task.should_use_resource_profiling(task.instance) == profiling_enabled
|
||||
|
||||
def test_created_by_extra_vars(self):
|
||||
job = Job(created_by=User(pk=123, username='angry-spud'))
|
||||
|
||||
@@ -517,20 +546,6 @@ class TestGenericRun():
|
||||
env = task.build_env(job, private_data_dir)
|
||||
assert env['FOO'] == 'BAR'
|
||||
|
||||
def test_awx_task_env_respects_ansible_collections_paths(self, patch_Job, private_data_dir):
    """User-supplied ANSIBLE_COLLECTIONS_PATHS entries are kept ahead of the
    AWX global path, with the per-job requirements dir appended last.
    """
    job = Job(project=Project(), inventory=Inventory())

    task = tasks.RunJob()
    task._write_extra_vars_file = mock.Mock()

    with mock.patch('awx.main.tasks.settings.AWX_ANSIBLE_COLLECTIONS_PATHS', ['/AWX_COLLECTION_PATH']):
        with mock.patch('awx.main.tasks.settings.AWX_TASK_ENV', {'ANSIBLE_COLLECTIONS_PATHS': '/MY_COLLECTION1:/MY_COLLECTION2'}):
            env = task.build_env(job, private_data_dir)
            used_paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(':')
            # The job-local requirements dir always comes last...
            assert used_paths[-1].endswith('/requirements_collections')
            used_paths.pop()
            # ...preceded by user paths in order, then the AWX global path.
            assert used_paths == ['/MY_COLLECTION1', '/MY_COLLECTION2', '/AWX_COLLECTION_PATH']
|
||||
|
||||
def test_valid_custom_virtualenv(self, patch_Job, private_data_dir):
|
||||
job = Job(project=Project(), inventory=Inventory())
|
||||
|
||||
@@ -565,6 +580,7 @@ class TestAdhocRun(TestJobExecution):
|
||||
|
||||
task = tasks.RunAdHocCommand()
|
||||
task.update_model = mock.Mock(wraps=adhoc_update_model_wrapper)
|
||||
task.model.objects.get = mock.Mock(return_value=adhoc_job)
|
||||
task.build_inventory = mock.Mock()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
|
||||
@@ -5,11 +5,15 @@
|
||||
import codecs
|
||||
import re
|
||||
import os
|
||||
import logging
|
||||
from itertools import islice
|
||||
from configparser import ConfigParser
|
||||
|
||||
# Django
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
logger = logging.getLogger('awx.main.utils.ansible')
|
||||
|
||||
|
||||
__all__ = ['skip_directory', 'could_be_playbook', 'could_be_inventory']
|
||||
|
||||
@@ -97,3 +101,20 @@ def could_be_inventory(project_path, dir_path, filename):
|
||||
except IOError:
|
||||
return None
|
||||
return inventory_rel_path
|
||||
|
||||
|
||||
def read_ansible_config(project_path, variables_of_interest):
    """Scan ansible.cfg files for the given ``[defaults]`` variables.

    Reads the project-level ansible.cfg (when ``project_path`` is given) and
    the system-wide /etc/ansible/ansible.cfg, returning a dict of whichever
    requested variable names were found. Any parse failure is logged and an
    empty/partial dict is returned rather than raising.

    NOTE(review): ConfigParser merges files in read order, so values from
    /etc/ansible/ansible.cfg override the project file here — confirm this
    precedence is intended (Ansible itself uses first-found, not merging).
    """
    config_files = ['/etc/ansible/ansible.cfg']
    if project_path:
        config_files.insert(0, os.path.join(project_path, 'ansible.cfg'))
    found = {}
    try:
        parser = ConfigParser()
        # Missing files are silently skipped by ConfigParser.read.
        parser.read(config_files)
        if 'defaults' in parser:
            defaults = parser['defaults']
            found.update({name: defaults[name] for name in variables_of_interest if name in defaults})
    except Exception:
        logger.exception('Failed to read ansible configuration(s) {}'.format(config_files))
    return found
|
||||
|
||||
@@ -19,9 +19,14 @@ from functools import reduce, wraps
|
||||
from decimal import Decimal
|
||||
|
||||
# Django
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
|
||||
from django.db.models.fields.related_descriptors import (
|
||||
ForwardManyToOneDescriptor,
|
||||
ManyToManyDescriptor
|
||||
)
|
||||
from django.db.models.query import QuerySet
|
||||
from django.db.models import Q
|
||||
|
||||
@@ -33,18 +38,22 @@ from django.apps import apps
|
||||
|
||||
logger = logging.getLogger('awx.main.utils')
|
||||
|
||||
__all__ = ['get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize', 'memoize_delete',
|
||||
'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_version', 'update_scm_url',
|
||||
'get_type_for_model', 'get_model_for_type', 'copy_model_by_class', 'region_sorting',
|
||||
'copy_m2m_relationships', 'prefetch_page_capabilities', 'to_python_boolean',
|
||||
'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
|
||||
'_inventory_updates', 'get_pk_from_dict', 'getattrd', 'getattr_dne', 'NoDefaultProvided',
|
||||
'get_current_apps', 'set_current_apps',
|
||||
'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity',
|
||||
'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict',
|
||||
'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
|
||||
'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', 'get_external_account',
|
||||
'task_manager_bulk_reschedule', 'schedule_task_manager', 'classproperty', 'create_temporary_fifo']
|
||||
__all__ = [
|
||||
'get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize',
|
||||
'memoize_delete', 'get_ansible_version', 'get_ssh_version', 'get_licenser',
|
||||
'get_awx_version', 'update_scm_url', 'get_type_for_model', 'get_model_for_type',
|
||||
'copy_model_by_class', 'region_sorting', 'copy_m2m_relationships',
|
||||
'prefetch_page_capabilities', 'to_python_boolean', 'ignore_inventory_computed_fields',
|
||||
'ignore_inventory_group_removal', '_inventory_updates', 'get_pk_from_dict', 'getattrd',
|
||||
'getattr_dne', 'NoDefaultProvided', 'get_current_apps', 'set_current_apps',
|
||||
'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity',
|
||||
'get_cpu_capacity', 'get_mem_capacity', 'wrap_args_with_proot', 'build_proot_temp_dir',
|
||||
'check_proot_installed', 'model_to_dict', 'NullablePromptPseudoField',
|
||||
'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
|
||||
'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
|
||||
'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
|
||||
'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
|
||||
]
|
||||
|
||||
|
||||
def get_object_or_400(klass, *args, **kwargs):
|
||||
@@ -435,6 +444,39 @@ def model_to_dict(obj, serializer_mapping=None):
|
||||
return attr_d
|
||||
|
||||
|
||||
class CharPromptDescriptor:
|
||||
"""Class used for identifying nullable launch config fields from class
|
||||
ex. Schedule.limit
|
||||
"""
|
||||
def __init__(self, field):
|
||||
self.field = field
|
||||
|
||||
|
||||
class NullablePromptPseudoField:
|
||||
"""
|
||||
Interface for pseudo-property stored in `char_prompts` dict
|
||||
Used in LaunchTimeConfig and submodels, defined here to avoid circular imports
|
||||
"""
|
||||
def __init__(self, field_name):
|
||||
self.field_name = field_name
|
||||
|
||||
@cached_property
|
||||
def field_descriptor(self):
|
||||
return CharPromptDescriptor(self)
|
||||
|
||||
def __get__(self, instance, type=None):
|
||||
if instance is None:
|
||||
# for inspection on class itself
|
||||
return self.field_descriptor
|
||||
return instance.char_prompts.get(self.field_name, None)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
if value in (None, {}):
|
||||
instance.char_prompts.pop(self.field_name, None)
|
||||
else:
|
||||
instance.char_prompts[self.field_name] = value
|
||||
|
||||
|
||||
def copy_model_by_class(obj1, Class2, fields, kwargs):
|
||||
'''
|
||||
Creates a new unsaved object of type Class2 using the fields from obj1
|
||||
@@ -442,9 +484,10 @@ def copy_model_by_class(obj1, Class2, fields, kwargs):
|
||||
'''
|
||||
create_kwargs = {}
|
||||
for field_name in fields:
|
||||
# Foreign keys can be specified as field_name or field_name_id.
|
||||
id_field_name = '%s_id' % field_name
|
||||
if hasattr(obj1, id_field_name):
|
||||
descriptor = getattr(Class2, field_name)
|
||||
if isinstance(descriptor, ForwardManyToOneDescriptor): # ForeignKey
|
||||
# Foreign keys can be specified as field_name or field_name_id.
|
||||
id_field_name = '%s_id' % field_name
|
||||
if field_name in kwargs:
|
||||
value = kwargs[field_name]
|
||||
elif id_field_name in kwargs:
|
||||
@@ -454,15 +497,29 @@ def copy_model_by_class(obj1, Class2, fields, kwargs):
|
||||
if hasattr(value, 'id'):
|
||||
value = value.id
|
||||
create_kwargs[id_field_name] = value
|
||||
elif isinstance(descriptor, CharPromptDescriptor):
|
||||
# difficult case of copying one launch config to another launch config
|
||||
new_val = None
|
||||
if field_name in kwargs:
|
||||
new_val = kwargs[field_name]
|
||||
elif hasattr(obj1, 'char_prompts'):
|
||||
if field_name in obj1.char_prompts:
|
||||
new_val = obj1.char_prompts[field_name]
|
||||
elif hasattr(obj1, field_name):
|
||||
# extremely rare case where a template spawns a launch config - sliced jobs
|
||||
new_val = getattr(obj1, field_name)
|
||||
if new_val is not None:
|
||||
create_kwargs.setdefault('char_prompts', {})
|
||||
create_kwargs['char_prompts'][field_name] = new_val
|
||||
elif isinstance(descriptor, ManyToManyDescriptor):
|
||||
continue # not copied in this method
|
||||
elif field_name in kwargs:
|
||||
if field_name == 'extra_vars' and isinstance(kwargs[field_name], dict):
|
||||
create_kwargs[field_name] = json.dumps(kwargs['extra_vars'])
|
||||
elif not isinstance(Class2._meta.get_field(field_name), (ForeignObjectRel, ManyToManyField)):
|
||||
create_kwargs[field_name] = kwargs[field_name]
|
||||
elif hasattr(obj1, field_name):
|
||||
field_obj = obj1._meta.get_field(field_name)
|
||||
if not isinstance(field_obj, ManyToManyField):
|
||||
create_kwargs[field_name] = getattr(obj1, field_name)
|
||||
create_kwargs[field_name] = getattr(obj1, field_name)
|
||||
|
||||
# Apply class-specific extra processing for origination of unified jobs
|
||||
if hasattr(obj1, '_update_unified_job_kwargs') and obj1.__class__ != Class2:
|
||||
@@ -481,7 +538,10 @@ def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
|
||||
'''
|
||||
for field_name in fields:
|
||||
if hasattr(obj1, field_name):
|
||||
field_obj = obj1._meta.get_field(field_name)
|
||||
try:
|
||||
field_obj = obj1._meta.get_field(field_name)
|
||||
except FieldDoesNotExist:
|
||||
continue
|
||||
if isinstance(field_obj, ManyToManyField):
|
||||
# Many to Many can be specified as field_name
|
||||
src_field_value = getattr(obj1, field_name)
|
||||
@@ -1032,3 +1092,19 @@ def create_temporary_fifo(data):
|
||||
).start()
|
||||
return path
|
||||
|
||||
|
||||
def truncate_stdout(stdout, size):
|
||||
from awx.main.constants import ANSI_SGR_PATTERN
|
||||
|
||||
if size <= 0 or len(stdout) <= size:
|
||||
return stdout
|
||||
|
||||
stdout = stdout[:(size - 1)] + u'\u2026'
|
||||
set_count, reset_count = 0, 0
|
||||
for m in ANSI_SGR_PATTERN.finditer(stdout):
|
||||
if m.group() == u'\u001b[0m':
|
||||
reset_count += 1
|
||||
else:
|
||||
set_count += 1
|
||||
|
||||
return stdout + u'\u001b[0m' * (set_count - reset_count)
|
||||
|
||||
@@ -184,6 +184,8 @@ class LogstashFormatter(LogstashFormatterBase):
|
||||
data_for_log[key] = 'Exception `{}` producing field'.format(e)
|
||||
|
||||
data_for_log['event_display'] = job_event.get_event_display2()
|
||||
if hasattr(job_event, 'workflow_job_id'):
|
||||
data_for_log['workflow_job_id'] = job_event.workflow_job_id
|
||||
|
||||
elif kind == 'system_tracking':
|
||||
data.pop('ansible_python_version', None)
|
||||
|
||||
@@ -294,6 +294,18 @@ class AWXProxyHandler(logging.Handler):
|
||||
super(AWXProxyHandler, self).__init__(**kwargs)
|
||||
self._handler = None
|
||||
self._old_kwargs = {}
|
||||
self._auditor = logging.handlers.RotatingFileHandler(
|
||||
filename='/var/log/tower/external.log',
|
||||
maxBytes=1024 * 1024 * 50, # 50 MB
|
||||
backupCount=5,
|
||||
)
|
||||
|
||||
class WritableLogstashFormatter(LogstashFormatter):
|
||||
@classmethod
|
||||
def serialize(cls, message):
|
||||
return json.dumps(message)
|
||||
|
||||
self._auditor.setFormatter(WritableLogstashFormatter())
|
||||
|
||||
def get_handler_class(self, protocol):
|
||||
return HANDLER_MAPPING.get(protocol, AWXNullHandler)
|
||||
@@ -327,6 +339,9 @@ class AWXProxyHandler(logging.Handler):
|
||||
def emit(self, record):
|
||||
if AWXProxyHandler.thread_local.enabled:
|
||||
actual_handler = self.get_handler()
|
||||
if settings.LOG_AGGREGATOR_AUDIT:
|
||||
self._auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
|
||||
self._auditor.emit(record)
|
||||
return actual_handler.emit(record)
|
||||
|
||||
def perform_test(self, custom_settings):
|
||||
|
||||
@@ -89,7 +89,9 @@ class ActionModule(ActionBase):
|
||||
playbook_url = '{}/api/remediations/v1/remediations/{}/playbook'.format(
|
||||
insights_url, item['id'])
|
||||
res = session.get(playbook_url, timeout=120)
|
||||
if res.status_code != 200:
|
||||
if res.status_code == 204:
|
||||
continue
|
||||
elif res.status_code != 200:
|
||||
result['failed'] = True
|
||||
result['msg'] = (
|
||||
'Expected {} to return a status code of 200 but returned status '
|
||||
|
||||
@@ -20,12 +20,42 @@
|
||||
mode: pull
|
||||
delete: yes
|
||||
recursive: yes
|
||||
when: ansible_kubectl_config is not defined
|
||||
|
||||
- name: Copy daemon log from the isolated host
|
||||
synchronize:
|
||||
src: "{{src}}/daemon.log"
|
||||
dest: "{{src}}/daemon.log"
|
||||
mode: pull
|
||||
when: ansible_kubectl_config is not defined
|
||||
|
||||
- name: Copy artifacts from pod
|
||||
synchronize:
|
||||
src: "{{src}}/artifacts/"
|
||||
dest: "{{src}}/artifacts/"
|
||||
mode: pull
|
||||
delete: yes
|
||||
recursive: yes
|
||||
set_remote_user: no
|
||||
rsync_opts:
|
||||
- "--rsh=$RSH"
|
||||
environment:
|
||||
RSH: "oc rsh --config={{ ansible_kubectl_config }}"
|
||||
delegate_to: localhost
|
||||
when: ansible_kubectl_config is defined
|
||||
|
||||
- name: Copy daemon log from pod
|
||||
synchronize:
|
||||
src: "{{src}}/daemon.log"
|
||||
dest: "{{src}}/daemon.log"
|
||||
mode: pull
|
||||
set_remote_user: no
|
||||
rsync_opts:
|
||||
- "--rsh=$RSH"
|
||||
environment:
|
||||
RSH: "oc rsh --config={{ ansible_kubectl_config }}"
|
||||
delegate_to: localhost
|
||||
when: ansible_kubectl_config is defined
|
||||
|
||||
- name: Fail if previous check determined that process is not alive.
|
||||
fail:
|
||||
|
||||
@@ -75,6 +75,8 @@
|
||||
force: "{{scm_clean}}"
|
||||
username: "{{scm_username|default(omit)}}"
|
||||
password: "{{scm_password|default(omit)}}"
|
||||
environment:
|
||||
LC_ALL: 'en_US.UTF-8'
|
||||
register: svn_result
|
||||
|
||||
- name: Set the svn repository version
|
||||
@@ -126,12 +128,14 @@
|
||||
register: doesRequirementsExist
|
||||
|
||||
- name: fetch galaxy roles from requirements.yml
|
||||
command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}
|
||||
command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}/roles"
|
||||
register: galaxy_result
|
||||
when: doesRequirementsExist.stat.exists
|
||||
changed_when: "'was installed successfully' in galaxy_result.stdout"
|
||||
environment:
|
||||
ANSIBLE_FORCE_COLOR: False
|
||||
|
||||
when: roles_enabled|bool
|
||||
delegate_to: localhost
|
||||
@@ -142,12 +146,15 @@
|
||||
register: doesCollectionRequirementsExist
|
||||
|
||||
- name: fetch galaxy collections from collections/requirements.yml
|
||||
command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}
|
||||
command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}/collections"
|
||||
register: galaxy_collection_result
|
||||
when: doesCollectionRequirementsExist.stat.exists
|
||||
changed_when: "'Installing ' in galaxy_collection_result.stdout"
|
||||
environment:
|
||||
ANSIBLE_FORCE_COLOR: False
|
||||
ANSIBLE_COLLECTIONS_PATHS: "{{ collections_destination }}"
|
||||
|
||||
when: collections_enabled|bool
|
||||
delegate_to: localhost
|
||||
|
||||
@@ -11,12 +11,25 @@
|
||||
secret: "{{ lookup('pipe', 'cat ' + src + '/env/ssh_key') }}"
|
||||
|
||||
tasks:
|
||||
|
||||
- name: synchronize job environment with isolated host
|
||||
synchronize:
|
||||
copy_links: true
|
||||
src: "{{src}}"
|
||||
dest: "{{dest}}"
|
||||
copy_links: yes
|
||||
src: "{{ src }}"
|
||||
dest: "{{ dest }}"
|
||||
when: ansible_kubectl_config is not defined
|
||||
|
||||
- name: synchronize job environment with remote job container
|
||||
synchronize:
|
||||
copy_links: yes
|
||||
src: "{{ src }}"
|
||||
dest: "{{ dest }}"
|
||||
set_remote_user: no
|
||||
rsync_opts:
|
||||
- "--rsh=$RSH"
|
||||
environment:
|
||||
RSH: "oc rsh --config={{ ansible_kubectl_config }}"
|
||||
delegate_to: localhost
|
||||
when: ansible_kubectl_config is defined
|
||||
|
||||
- local_action: stat path="{{src}}/env/ssh_key"
|
||||
register: key
|
||||
@@ -26,7 +39,7 @@
|
||||
when: key.stat.exists
|
||||
|
||||
- name: spawn the playbook
|
||||
command: "ansible-runner start {{src}} -p {{playbook}} -i {{ident}}"
|
||||
command: "ansible-runner start {{src}} -p '{{playbook}}' -i {{ident}}"
|
||||
when: playbook is defined
|
||||
|
||||
- name: spawn the adhoc command
|
||||
|
||||
@@ -39,8 +39,9 @@ import uuid
|
||||
from time import time
|
||||
|
||||
from jinja2 import Environment
|
||||
from six import integer_types, PY3
|
||||
from six.moves import configparser
|
||||
|
||||
from ansible.module_utils.six import integer_types, PY3
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
|
||||
try:
|
||||
import argparse
|
||||
@@ -152,7 +153,7 @@ class VMWareInventory(object):
|
||||
try:
|
||||
text = str(text)
|
||||
except UnicodeEncodeError:
|
||||
text = text.encode('ascii', 'ignore')
|
||||
text = text.encode('utf-8')
|
||||
print('%s %s' % (datetime.datetime.now(), text))
|
||||
|
||||
def show(self):
|
||||
@@ -186,14 +187,14 @@ class VMWareInventory(object):
|
||||
|
||||
def write_to_cache(self, data):
|
||||
''' Dump inventory to json file '''
|
||||
with open(self.cache_path_cache, 'wb') as f:
|
||||
f.write(json.dumps(data))
|
||||
with open(self.cache_path_cache, 'w') as f:
|
||||
f.write(json.dumps(data, indent=2))
|
||||
|
||||
def get_inventory_from_cache(self):
|
||||
''' Read in jsonified inventory '''
|
||||
|
||||
jdata = None
|
||||
with open(self.cache_path_cache, 'rb') as f:
|
||||
with open(self.cache_path_cache, 'r') as f:
|
||||
jdata = f.read()
|
||||
return json.loads(jdata)
|
||||
|
||||
@@ -343,10 +344,22 @@ class VMWareInventory(object):
|
||||
'pwd': self.password,
|
||||
'port': int(self.port)}
|
||||
|
||||
if hasattr(ssl, 'SSLContext') and not self.validate_certs:
|
||||
if self.validate_certs and hasattr(ssl, 'SSLContext'):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
context.verify_mode = ssl.CERT_REQUIRED
|
||||
context.check_hostname = True
|
||||
kwargs['sslContext'] = context
|
||||
elif self.validate_certs and not hasattr(ssl, 'SSLContext'):
|
||||
sys.exit('pyVim does not support changing verification mode with python < 2.7.9. Either update '
|
||||
'python or use validate_certs=false.')
|
||||
elif not self.validate_certs and hasattr(ssl, 'SSLContext'):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
context.check_hostname = False
|
||||
kwargs['sslContext'] = context
|
||||
elif not self.validate_certs and not hasattr(ssl, 'SSLContext'):
|
||||
# Python 2.7.9 < or RHEL/CentOS 7.4 <
|
||||
pass
|
||||
|
||||
return self._get_instances(kwargs)
|
||||
|
||||
@@ -390,7 +403,7 @@ class VMWareInventory(object):
|
||||
instances = [x for x in instances if x.name == self.args.host]
|
||||
|
||||
instance_tuples = []
|
||||
for instance in sorted(instances):
|
||||
for instance in instances:
|
||||
if self.guest_props:
|
||||
ifacts = self.facts_from_proplist(instance)
|
||||
else:
|
||||
@@ -614,7 +627,14 @@ class VMWareInventory(object):
|
||||
lastref = lastref[x]
|
||||
else:
|
||||
lastref[x] = val
|
||||
|
||||
if self.args.debug:
|
||||
self.debugl("For %s" % vm.name)
|
||||
for key in list(rdata.keys()):
|
||||
if isinstance(rdata[key], dict):
|
||||
for ikey in list(rdata[key].keys()):
|
||||
self.debugl("Property '%s.%s' has value '%s'" % (key, ikey, rdata[key][ikey]))
|
||||
else:
|
||||
self.debugl("Property '%s' has value '%s'" % (key, rdata[key]))
|
||||
return rdata
|
||||
|
||||
def facts_from_vobj(self, vobj, level=0):
|
||||
@@ -685,7 +705,7 @@ class VMWareInventory(object):
|
||||
if vobj.isalnum():
|
||||
rdata = vobj
|
||||
else:
|
||||
rdata = vobj.decode('ascii', 'ignore')
|
||||
rdata = vobj.encode('utf-8').decode('utf-8')
|
||||
elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
|
||||
rdata = vobj
|
||||
elif issubclass(type(vobj), integer_types) or isinstance(vobj, integer_types):
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user